diff --git a/.dockerignore b/.dockerignore index 701263f5fedded..602b46750d3708 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,7 +1,11 @@ **/node_modules/ */build/ */*/build/ -*/venv/ +**/venv/ +**/.tox/ +**/.mypy_cache/ +**/.pytest_cache/ +**/__pycache__/ out **/*.class # Have to copy gradle/wrapper/gradle-wrapper.jar, can't exclude ALL jars diff --git a/.drone.yml b/.drone.yml index 44f1a4aaffc426..41582a8d61ecde 100644 --- a/.drone.yml +++ b/.drone.yml @@ -3,7 +3,7 @@ name: default anchors: build_frontend: &build_frontend - image: gradle:jdk11 + image: gradle:jdk17 commands: - cd "/drone/src/$${ENVIRONMENT}" - sed -i "s//$AMPLITUDE_API_KEY/g" datahub-web-react/src/conf/analytics.ts diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml new file mode 100644 index 00000000000000..0dcbdcecf34add --- /dev/null +++ b/.github/actions/ci-optimization/action.yml @@ -0,0 +1,87 @@ +name: 'Identify CI Optimizations' +description: 'Determine if code changes are specific to certain modules.' 
+ +outputs: + frontend-only: + description: "Frontend only change" + value: ${{ steps.filter.outputs.frontend == 'true' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'false' }} + ingestion-only: + description: "Ingestion only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'true' && steps.filter.outputs.backend == 'false' }} + backend-only: + description: "Backend only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }} + backend-change: + description: "Backend code has changed" + value: ${{ steps.filter.outputs.backend == 'true' }} + ingestion-change: + description: "Ingestion code has changed" + value: ${{ steps.filter.outputs.ingestion == 'true' }} + frontend-change: + description: "Frontend code has changed" + value: ${{ steps.filter.outputs.frontend == 'true' }} + docker-change: + description: "Docker code has changed" + value: ${{ steps.filter.outputs.docker == 'true' }} + kafka-setup-change: + description: "Kafka setup docker change" + value: ${{ steps.filter.outputs.kafka-setup == 'true' }} + mysql-setup-change: + description: "Mysql setup docker change" + value: ${{ steps.filter.outputs.mysql-setup == 'true' }} + postgres-setup-change: + description: "Postgres setup docker change" + value: ${{ steps.filter.outputs.postgres-setup == 'true' }} + elasticsearch-setup-change: + description: "Elasticsearch setup docker change" + value: ${{ steps.filter.outputs.elasticsearch-setup == 'true' }} + smoke-test-change: + description: "Smoke test change" + value: ${{ steps.filter.outputs.smoke-test == 'true' }} +runs: + using: "composite" + steps: + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + frontend: + - "datahub-frontend/**" + - "datahub-web-react/**" + - "smoke-test/tests/cypress/**" + - "docker/datahub-frontend/**" + ingestion: + - 
"metadata-ingestion-modules/**" + - "metadata-ingestion/**" + - "metadata-models/**" + - "smoke-test/**" + - "docker/datahub-ingestion**" + docker: + - "docker/**" + backend: + - ".github/**" + - "metadata-models/**" + - "datahub-upgrade/**" + - "entity-registry/**" + - "li-utils/**" + - "metadata-auth/**" + - "metadata-dao-impl/**" + - "metadata-events/**" + - "metadata-io/**" + - "metadata-jobs/**" + - "metadata-service/**" + - "metadata-utils/**" + - "metadata-operation-context/**" + - "datahub-graphql-core/**" + - "smoke-test/**" + - "docker/**" + kafka-setup: + - "docker/kafka-setup/**" + mysql-setup: + - "docker/mysql-setup/**" + postgres-setup: + - "docker/postgres-setup/**" + elasticsearch-setup: + - "docker/elasticsearch-setup/**" + smoke-test: + - "smoke-test/**" diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index ca0796180cd573..3f8ea7a4c88ebd 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ b/.github/actions/docker-custom-build-and-push/action.yml @@ -20,7 +20,7 @@ inputs: required: false images: - # e.g. linkedin/datahub-gms + # e.g. 
acryldata/datahub-gms description: "List of Docker images to use as base name for tags" required: true build-args: diff --git a/.github/scripts/check_python_package.py b/.github/scripts/check_python_package.py new file mode 100644 index 00000000000000..f1f30056917006 --- /dev/null +++ b/.github/scripts/check_python_package.py @@ -0,0 +1,18 @@ +import setuptools + +folders = ["./smoke-test/tests"] + +for folder in folders: + print(f"Checking folder {folder}") + a = [i for i in setuptools.find_packages(folder) if "cypress" not in i] + b = [i for i in setuptools.find_namespace_packages(folder) if "cypress" not in i] + + in_a_not_b = set(a) - set(b) + in_b_not_a = set(b) - set(a) + + assert ( + len(in_a_not_b) == 0 + ), f"Found packages in {folder} that are not in namespace packages: {in_a_not_b}" + assert ( + len(in_b_not_a) == 0 + ), f"Found namespace packages in {folder} that are not in packages: {in_b_not_a}" diff --git a/.github/scripts/docker_helpers.sh b/.github/scripts/docker_helpers.sh index 334465532db06b..0487c69eee0ef4 100755 --- a/.github/scripts/docker_helpers.sh +++ b/.github/scripts/docker_helpers.sh @@ -24,7 +24,7 @@ function get_tag_full { } function get_python_docker_release_v { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},0.0.0+docker.${SHORT_SHA},g" -e 's,refs/tags/v\(.*\),\1+docker,g' -e 's,refs/pull/\([0-9]*\).*,0.0.0+docker.pr\1,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},1!0.0.0+docker.${SHORT_SHA},g" -e 's,refs/tags/v\(.*\),1!\1+docker,g' -e 's,refs/pull/\([0-9]*\).*,1!0.0.0+docker.pr\1,g') } function get_unique_tag { @@ -37,4 +37,4 @@ function get_unique_tag_slim { function get_unique_tag_full { echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') -} \ No newline at end of file +} diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 
d0c0f52781b9af..21fd547114872a 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -32,6 +32,7 @@ jobs: strategy: matrix: include: + # Note: this should be kept in sync with tox.ini. - python-version: "3.8" extra_pip_requirements: "apache-airflow~=2.1.4" extra_pip_extras: plugin-v1 @@ -39,17 +40,26 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.4.0" + extra_pip_requirements: "apache-airflow==2.4.3" + extra_pip_extras: plugin-v2,test-airflow24 + - python-version: "3.10" + extra_pip_requirements: 'apache-airflow==2.6.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.6.0" + extra_pip_requirements: 'apache-airflow==2.7.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0" + extra_pip_requirements: 'apache-airflow==2.8.1 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt' extra_pip_extras: plugin-v2 fail-fast: false steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -57,7 +67,7 @@ jobs: - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Install airflow package and test (extras ${{ matrix.extra_pip_requirements }}) - run: ./gradlew -Pextra_pip_requirements='${{ matrix.extra_pip_requirements }}' -Pextra_pip_extras='${{ matrix.extra_pip_extras }}' :metadata-ingestion-modules:airflow-plugin:lint 
:metadata-ingestion-modules:airflow-plugin:testQuick + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extra_pip_requirements }}' -Pextra_pip_extras='${{ matrix.extra_pip_extras }}' :metadata-ingestion-modules:airflow-plugin:build - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/airflow-plugin/venv/bin/activate && pip freeze @@ -69,6 +79,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Upload coverage to Codecov if: always() uses: codecov/codecov-action@v3 @@ -76,8 +87,8 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} directory: . fail_ci_if_error: false - flags: airflow-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} - name: pytest-airflow + flags: airflow,airflow-${{ matrix.extra_pip_extras }} + name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }} verbose: true event-file: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 10c137a206531a..3f37fffc599bb7 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -20,6 +20,25 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + 
elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - uses: ./.github/actions/ci-optimization + id: ci-optimize + build: strategy: fail-fast: false @@ -29,34 +48,57 @@ jobs: "except_metadata_ingestion", "frontend", ] - timezone: ["UTC", "America/New_York"] + timezone: ["UTC"] + include: + # We only need the timezone variation for frontend tests. + - command: "frontend" + timezone: "America/New_York" runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - - uses: szenius/set-timezone@v1.0 + - uses: szenius/set-timezone@v1.2 with: timezoneLinux: ${{ matrix.timezone }} - - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" cache: pip - - name: Gradle build (and test) for metadata ingestion - # we only need the timezone runs for frontend tests - if: ${{ matrix.command == 'except_metadata_ingestion' && matrix.timezone == 'America/New_York' }} + - name: Gradle build (and test) for NOT metadata ingestion + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} run: | - ./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel + ./gradlew build \ + -x :metadata-ingestion:build \ + -x :metadata-ingestion:check \ + -x :docs-website:build \ + -x 
:metadata-integration:java:spark-lineage:test \ + -x :metadata-io:test \ + -x :metadata-ingestion-modules:airflow-plugin:build \ + -x :metadata-ingestion-modules:airflow-plugin:check \ + -x :metadata-ingestion-modules:dagster-plugin:build \ + -x :metadata-ingestion-modules:dagster-plugin:check \ + -x :datahub-frontend:build \ + -x :datahub-web-react:build \ + --parallel - name: Gradle build (and test) for frontend - if: ${{ matrix.command == 'frontend' }} + if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} run: | ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel env: NODE_OPTIONS: "--max-old-space-size=3072" + - name: Gradle compile (jdk8) for legacy Spark + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} + run: | + ./gradlew -PjavaClassVersionDefault=8 :metadata-integration:java:spark-lineage:compileJava - uses: actions/upload-artifact@v3 if: always() with: @@ -65,27 +107,20 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated - - name: Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 - with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities quickstart-compose-validation: runs-on: ubuntu-latest + needs: setup + if: ${{ needs.setup.outputs.docker_change == 'true' }} steps: - - uses: actions/checkout@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Download YQ - uses: chrisdickinson/setup-yq@v1.0.1 - with: - yq-version: v4.28.2 - name: Quickstart Compose Validation run: ./docker/quickstart/generate_and_compare.sh diff --git a/.github/workflows/check-datahub-jars.yml 
b/.github/workflows/check-datahub-jars.yml index 8e507ea40fd963..ca3510217f74a1 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -27,12 +27,13 @@ jobs: command: ["datahub-client", "datahub-protobuf", "spark-lineage"] runs-on: ubuntu-latest steps: - - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/code-checks.yml b/.github/workflows/code-checks.yml index 38f0946678034b..b3b94cc40a2fdd 100644 --- a/.github/workflows/code-checks.yml +++ b/.github/workflows/code-checks.yml @@ -31,7 +31,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/dagster-plugin.yml b/.github/workflows/dagster-plugin.yml new file mode 100644 index 00000000000000..48f1b24196c9e0 --- /dev/null +++ b/.github/workflows/dagster-plugin.yml @@ -0,0 +1,85 @@ +name: Dagster Plugin +on: + push: + branches: + - master + paths: + - ".github/workflows/dagster-plugin.yml" + - "metadata-ingestion-modules/dagster-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + pull_request: + branches: + - master + paths: + - ".github/**" + - "metadata-ingestion-modules/dagster-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + release: + types: [published] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + dagster-plugin: + runs-on: ubuntu-latest + env: + SPARK_VERSION: 3.0.3 + DATAHUB_TELEMETRY_ENABLED: false + strategy: + matrix: + python-version: ["3.8", "3.10"] 
+ include: + - python-version: "3.8" + extraPythonRequirement: "dagster>=1.3.3" + - python-version: "3.10" + extraPythonRequirement: "dagster>=1.3.3" + fail-fast: false + steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh + - name: Install dagster package and test (extras ${{ matrix.extraPythonRequirement }}) + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion-modules:dagster-plugin:lint :metadata-ingestion-modules:dagster-plugin:testQuick + - name: pip freeze show list installed + if: always() + run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && pip freeze + - uses: actions/upload-artifact@v3 + if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }} + with: + name: Test Results (dagster Plugin ${{ matrix.python-version}}) + path: | + **/build/reports/tests/test/** + **/build/test-results/test/** + **/junit.*.xml + - name: Upload coverage to Codecov + if: always() + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: . 
+ fail_ci_if_error: false + flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + name: pytest-dagster + verbose: true + + event-file: + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/docker-ingestion-smoke.yml b/.github/workflows/docker-ingestion-smoke.yml index 803ddc6fcec751..e3c37b45434ab1 100644 --- a/.github/workflows/docker-ingestion-smoke.yml +++ b/.github/workflows/docker-ingestion-smoke.yml @@ -25,7 +25,7 @@ jobs: python_release_version: ${{ steps.python_release_version.outputs.release_version }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -50,7 +50,7 @@ jobs: if: ${{ needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-postgres-setup.yml b/.github/workflows/docker-postgres-setup.yml index e4d6cfc106f812..956f3f7b1c3903 100644 --- a/.github/workflows/docker-postgres-setup.yml +++ b/.github/workflows/docker-postgres-setup.yml @@ -27,7 +27,7 @@ jobs: publish: ${{ steps.publish.outputs.publish }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -46,7 +46,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index fef23f9efa85f1..5e1409003c4765 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -3,15 +3,9 @@ on: push: 
branches: - master - paths-ignore: - - "docs/**" - - "**.md" pull_request: branches: - "**" - paths-ignore: - - "docs/**" - - "**.md" release: types: [published] @@ -23,12 +17,12 @@ concurrency: cancel-in-progress: true env: - DATAHUB_GMS_IMAGE: "linkedin/datahub-gms" - DATAHUB_FRONTEND_IMAGE: "linkedin/datahub-frontend-react" - DATAHUB_MAE_CONSUMER_IMAGE: "linkedin/datahub-mae-consumer" - DATAHUB_MCE_CONSUMER_IMAGE: "linkedin/datahub-mce-consumer" - DATAHUB_KAFKA_SETUP_IMAGE: "linkedin/datahub-kafka-setup" - DATAHUB_ELASTIC_SETUP_IMAGE: "linkedin/datahub-elasticsearch-setup" + DATAHUB_GMS_IMAGE: "acryldata/datahub-gms" + DATAHUB_FRONTEND_IMAGE: "acryldata/datahub-frontend-react" + DATAHUB_MAE_CONSUMER_IMAGE: "acryldata/datahub-mae-consumer" + DATAHUB_MCE_CONSUMER_IMAGE: "acryldata/datahub-mce-consumer" + DATAHUB_KAFKA_SETUP_IMAGE: "acryldata/datahub-kafka-setup" + DATAHUB_ELASTIC_SETUP_IMAGE: "acryldata/datahub-elasticsearch-setup" DATAHUB_MYSQL_SETUP_IMAGE: "acryldata/datahub-mysql-setup" DATAHUB_UPGRADE_IMAGE: "acryldata/datahub-upgrade" DATAHUB_INGESTION_BASE_IMAGE: "acryldata/datahub-ingestion-base" @@ -49,9 +43,19 @@ jobs: short_sha: ${{ steps.tag.outputs.short_sha }} branch_name: ${{ steps.tag.outputs.branch_name }} repository_name: ${{ steps.tag.outputs.repository_name }} + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + backend_only: ${{ steps.ci-optimize.outputs.backend-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ 
steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} steps: - - name: Checkout - uses: actions/checkout@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -69,16 +73,48 @@ jobs: - name: Check whether publishing enabled id: publish env: - ENABLE_PUBLISH: ${{ secrets.DOCKER_PASSWORD != '' && secrets.ACRYL_DOCKER_PASSWORD != '' }} + ENABLE_PUBLISH: ${{ secrets.ACRYL_DOCKER_PASSWORD != '' }} run: | echo "Enable publish: ${{ env.ENABLE_PUBLISH }}" echo "publish=${{ env.ENABLE_PUBLISH }}" >> $GITHUB_OUTPUT + - uses: ./.github/actions/ci-optimization + id: ci-optimize + - uses: actions/setup-python@v4 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + python-version: "3.10" + cache: "pip" + - uses: actions/cache@v4 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} + - name: Set up JDK 17 + uses: actions/setup-java@v3 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 + - name: Run lint on smoke test + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + run: | + python ./.github/scripts/check_python_package.py + ./gradlew :smoke-test:lint gms_build: name: Build and Push DataHub GMS Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -91,8 +127,8 @@ jobs: images: | ${{ env.DATAHUB_GMS_IMAGE }} 
tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . file: ./docker/datahub-gms/Dockerfile @@ -105,9 +141,10 @@ jobs: name: "[Monitoring] Scan GMS images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, gms_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.setup.outputs.publish != 'true' }} @@ -134,7 +171,14 @@ jobs: name: Build and Push DataHub MAE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -147,8 +191,8 @@ jobs: images: | ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . 
file: ./docker/datahub-mae-consumer/Dockerfile @@ -157,13 +201,14 @@ jobs: name: "[Monitoring] Scan MAE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mae_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.setup.outputs.publish != 'true' }} @@ -190,9 +235,16 @@ jobs: name: Build and Push DataHub MCE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-jobs:mce-consumer-job:build -x test --parallel @@ -203,8 +255,8 @@ jobs: images: | ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . 
file: ./docker/datahub-mce-consumer/Dockerfile @@ -213,13 +265,14 @@ jobs: name: "[Monitoring] Scan MCE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mce_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.setup.outputs.publish != 'true' }} @@ -246,9 +299,16 @@ jobs: name: Build and Push DataHub Upgrade Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-upgrade:build -x test --parallel @@ -269,13 +329,14 @@ jobs: name: "[Monitoring] Scan DataHub Upgrade images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_upgrade_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just 
to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.setup.outputs.publish != 'true' }} @@ -302,9 +363,16 @@ jobs: name: Build and Push DataHub Frontend Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel @@ -317,8 +385,8 @@ jobs: images: | ${{ env.DATAHUB_FRONTEND_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . 
file: ./docker/datahub-frontend/Dockerfile @@ -327,6 +395,7 @@ jobs: name: "[Monitoring] Scan Frontend images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, frontend_build] + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -360,17 +429,18 @@ jobs: name: Build and Push DataHub Kafka Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.kafka_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: images: | ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . 
file: ./docker/kafka-setup/Dockerfile @@ -380,9 +450,10 @@ jobs: name: Build and Push DataHub MySQL Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -400,17 +471,18 @@ jobs: name: Build and Push DataHub Elasticsearch Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: images: | ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} publish: ${{ needs.setup.outputs.publish }} context: . 
file: ./docker/elasticsearch-setup/Dockerfile @@ -422,9 +494,10 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: setup + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -454,9 +527,10 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -494,9 +568,10 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -536,9 +611,16 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -585,9 +667,10 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion Slim images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, 
datahub_ingestion_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image Slim Image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} @@ -617,9 +700,16 @@ jobs: tag: ${{ steps.tag.outputs.tag }} needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 - name: Check out the repo - uses: hsheth2/sane-checkout-action@v1 + uses: acryldata/sane-checkout-action@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -665,9 +755,10 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image Full Image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.datahub_ingestion_full_build.outputs.needs_artifact_download == 'true' }} @@ -690,21 +781,31 @@ jobs: with: sarif_file: "trivy-results.sarif" + smoke_test_matrix: + runs-on: ubuntu-latest + needs: setup + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - id: set-matrix + run: | + if [ '${{ 
needs.setup.outputs.frontend_only }}' == 'true' ]; then + echo 'matrix=["cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.backend_change }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + else + echo 'matrix=[]' >> $GITHUB_OUTPUT + fi + smoke_test: name: Run Smoke Tests runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - test_strategy: [ - "no_cypress_suite0", - "no_cypress_suite1", - "cypress_suite1", - "cypress_rest" - ] needs: [ setup, + smoke_test_matrix, gms_build, frontend_build, kafka_setup_build, @@ -715,16 +816,22 @@ jobs: datahub_upgrade_build, datahub_ingestion_slim_build, ] + strategy: + fail-fast: false + matrix: + test_strategy: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }} + if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }} steps: - name: Disk Check run: df -h . && docker images - name: Check out the repo - uses: actions/checkout@v3 - - name: Set up JDK 11 + uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -742,57 +849,99 @@ jobs: run: df -h . 
&& docker images - name: Download GMS image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.gms_build.result == 'success' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Frontend image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.frontend_build.result == 'success' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Kafka Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.kafka_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Mysql Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mysql_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Elastic Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.elasticsearch_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MCE Consumer image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mce_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MAE Consumer image uses: 
ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.mae_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download upgrade image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.datahub_upgrade_build.result == 'success' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download datahub-ingestion-slim image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} + if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }} - name: Disk Check run: df -h . 
&& docker images + - name: CI Optimization Head Images + # When publishing all tests/images are built (no optimizations) + if: ${{ needs.setup.outputs.publish != 'true' }} + run: | + if [ '${{ needs.setup.outputs.backend_change }}' == 'false' ]; then + echo 'GMS/Upgrade/MCE/MAE head images' + docker pull '${{ env.DATAHUB_GMS_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' + docker tag '${{ env.DATAHUB_GMS_IMAGE }}:head' '${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' '${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.frontend_change }}' == 'false' ]; then + echo 'Frontend head images' + docker pull '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' + docker tag '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' '${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.kafka_setup_change }}' == 'false' ]; then + echo 'kafka-setup head images' + docker pull '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.mysql_setup_change }}' == 'false' ]; then + echo 'mysql-setup head images' + docker pull '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.elasticsearch_setup_change }}' == 
'false' ]; then + echo 'elasticsearch-setup head images' + docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then + echo 'datahub-ingestion head-slim images' + docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' + if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then + docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}' + fi + fi - name: run quickstart env: DATAHUB_TELEMETRY_ENABLED: false DATAHUB_VERSION: ${{ needs.setup.outputs.unique_tag }} DATAHUB_ACTIONS_IMAGE: ${{ env.DATAHUB_INGESTION_IMAGE }} - ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag }} + ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }} ACTIONS_EXTRA_PACKAGES: "acryl-datahub-actions[executor]==0.0.13 acryl-datahub-actions==0.0.13 acryl-datahub==0.10.5" ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | @@ -831,14 +980,14 @@ jobs: if: failure() run: | docker ps -a - docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true - docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true - docker logs datahub-mae-consumer >& mae-${{ matrix.test_strategy }}.log || true - docker logs datahub-mce-consumer >& mce-${{ matrix.test_strategy }}.log || true - docker logs broker >& broker-${{ matrix.test_strategy }}.log || true - docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true - docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true - docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-gms-1 
>& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-actions-1 >& actions-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-mae-consumer-1 >& mae-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-mce-consumer-1 >& mce-${{ matrix.test_strategy }}.log || true + docker logs datahub-broker-1 >& broker-${{ matrix.test_strategy }}.log || true + docker logs datahub-mysql-1 >& mysql-${{ matrix.test_strategy }}.log || true + docker logs datahub-elasticsearch-1 >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-frontend-react-1 >& frontend-${{ matrix.test_strategy }}.log || true - name: Upload logs uses: actions/upload-artifact@v3 if: failure() @@ -859,30 +1008,20 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 - with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities + !**/binary/** deploy_datahub_head: name: Deploy to Datahub HEAD runs-on: ubuntu-latest - needs: - [ - setup, - smoke_test - ] + needs: [setup, smoke_test] steps: - uses: aws-actions/configure-aws-credentials@v1 - if: ${{ needs.setup.outputs.publish != 'false' }} + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} with: aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }} aws-region: us-west-2 - uses: isbang/sqs-action@v0.2.0 - if: ${{ needs.setup.outputs.publish != 'false' }} + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} with: sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }} message: '{ "command": "git-sync", "args" : {"repoName": 
"${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}' diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index c94282938120e4..ab7890ad5b44f9 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -7,6 +7,7 @@ on: paths: - "metadata-ingestion/**" - "metadata-models/**" + - "docs/**" - "docs-website/**" push: branches: @@ -14,6 +15,7 @@ on: paths: - "metadata-ingestion/**" - "metadata-models/**" + - "docs/**" - "docs-website/**" # release: # types: [published, edited] @@ -26,12 +28,17 @@ jobs: gh-pages: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 + # We explicitly don't use acryldata/sane-checkout-action because docusaurus runs + # git commands to determine the last change date for each file. + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/lint-actions.yml b/.github/workflows/lint-actions.yml index 6f34bf292bf51a..4d83adbeba08a1 100644 --- a/.github/workflows/lint-actions.yml +++ b/.github/workflows/lint-actions.yml @@ -10,7 +10,7 @@ jobs: actionlint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: acryldata/sane-checkout-action@v3 - uses: reviewdog/action-actionlint@v1 with: reporter: github-pr-review diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index ec6bd4141cc6fc..c978a8ff09b5e6 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -6,6 +6,7 @@ on: paths: - ".github/workflows/metadata-ingestion.yml" - "metadata-ingestion/**" + 
- "metadata-ingestion-modules/**" - "metadata-models/**" pull_request: branches: @@ -13,6 +14,7 @@ on: paths: - ".github/workflows/metadata-ingestion.yml" - "metadata-ingestion/**" + - "metadata-ingestion-modules/**" - "metadata-models/**" release: types: [published] @@ -31,7 +33,7 @@ jobs: # DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }} strategy: matrix: - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] command: [ "testQuick", @@ -40,11 +42,26 @@ jobs: "testIntegrationBatch2", ] include: - - python-version: "3.7" + - python-version: "3.8" - python-version: "3.10" fail-fast: false steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 + - uses: acryldata/sane-checkout-action@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - uses: actions/cache@v4 + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -73,6 +90,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Upload coverage to Codecov if: ${{ always() && matrix.python-version == '3.10' }} uses: codecov/codecov-action@v3 diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 48f230ce14c8db..c59675cf006142 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -24,23 +24,42 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ 
steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - uses: ./.github/actions/ci-optimization + id: ci-optimize build: runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" + cache: "pip" - name: Gradle build (and test) - # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs - # running build first without datahub-web-react:yarnBuild and then with it is 100% stable - # datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it run: | ./gradlew :metadata-io:test - uses: actions/upload-artifact@v3 @@ -51,6 +70,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index eb098a327e4cb5..239d2d991915c0 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -29,10 
+29,17 @@ jobs: runs-on: ubuntu-latest needs: setup steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/gradle-build-action@v2 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml index 82bb90f68f4c32..4fdb4a5b92e4d6 100644 --- a/.github/workflows/pr-labeler.yml +++ b/.github/workflows/pr-labeler.yml @@ -14,3 +14,71 @@ jobs: with: repo-token: "${{ secrets.GITHUB_TOKEN }}" configuration-path: ".github/pr-labeler-config.yml" + - uses: actions-ecosystem/action-add-labels@v1.1.0 + # only add names of Acryl Data team members here + if: + ${{ + !contains( + fromJson('[ + "anshbansal", + "asikowitz", + "chriscollins3456", + "david-leifker", + "shirshanka", + "sid-acryl", + "swaroopjagadish", + "treff7es", + "yoonhyejin", + "eboneil", + "ethan-cartwright", + "gabe-lyons", + "hsheth2", + "jjoyce0510", + "maggiehays", + "mrjefflewis", + "pedro93", + "RyanHolstien", + "Kunal-kankriya", + "purnimagarg1", + "gaurav2733", + "dushayntAW", + "AvaniSiddhapuraAPT", + "akarsh991", + "shubhamjagtap639", + "mayurinehate" + ]'), + github.actor + ) + }} + with: + github_token: ${{ github.token }} + labels: | + community-contribution + - uses: actions-ecosystem/action-add-labels@v1.1.0 + # only add names of champions here. 
Confirm with DevRel Team + if: + ${{ + contains( + fromJson('[ + "skrydal", + "siladitya2", + "sgomezvillamor", + "ngamanda", + "HarveyLeo", + "frsann", + "bossenti", + "nikolakasev", + "PatrickfBraz", + "cuong-pham", + "sudhakarast", + "tkdrahn", + "rtekal", + "sgm44" + ]'), + github.actor + ) + }} + with: + github_token: ${{ github.token }} + labels: | + datahub-community-champion diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index ec7985ef3b3d03..3541ed6c29d62b 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -36,7 +36,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -48,15 +48,17 @@ jobs: needs: ["check-secret", "setup"] if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: checkout upstream repo run: | git remote add upstream https://github.com/datahub-project/datahub.git diff --git a/.github/workflows/qodana-scan.yml b/.github/workflows/qodana-scan.yml index 022ec9e6eda6a9..750cf24ad38e57 100644 --- a/.github/workflows/qodana-scan.yml +++ b/.github/workflows/qodana-scan.yml @@ -14,7 +14,7 @@ jobs: qodana: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: acryldata/sane-checkout-action@v3 - name: "Qodana Scan" uses: JetBrains/qodana-action@v2022.3.4 - uses: github/codeql-action/upload-sarif@v2 diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 70b66d6452b266..d143906bc7ccb6 100644 --- 
a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -29,19 +29,25 @@ jobs: spark-smoke-test: runs-on: ubuntu-latest steps: - - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/gradle-build-action@v2 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh + - name: Disk Check + run: df -h . && docker images - name: Remove images run: docker image prune -a -f || true + - name: Disk Check + run: df -h . && docker images - name: Smoke test run: | ./gradlew :metadata-integration:java:spark-lineage:integrationTest \ @@ -49,10 +55,27 @@ jobs: -x :datahub-web-react:yarnLint \ -x :datahub-web-react:yarnGenerate \ -x :datahub-web-react:yarnInstall \ - -x :datahub-web-react:yarnQuickBuild \ - -x :datahub-web-react:copyAssets \ + -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar + - name: store logs + if: failure() + run: | + docker ps -a + docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true + docker logs broker >& broker-${{ matrix.test_strategy }}.log || true + docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true + docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true + - name: Upload logs + uses: actions/upload-artifact@v3 + if: failure() + with: + name: docker logs + path: | + "**/build/container-logs/*.log" + "*.log" - uses: actions/upload-artifact@v3 if: always() with: @@ -61,10 +84,4 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: 
Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 - with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities + !**/binary/** diff --git a/.github/workflows/test-results.yml b/.github/workflows/test-results.yml index 0153060692271c..c94a5fc340f473 100644 --- a/.github/workflows/test-results.yml +++ b/.github/workflows/test-results.yml @@ -2,7 +2,7 @@ name: Test Results on: workflow_run: - workflows: ["build & test", "metadata ingestion", "Airflow Plugin"] + workflows: ["build & test", "metadata ingestion", "Airflow Plugin", "Dagster Plugin"] types: - completed diff --git a/README.md b/README.md index 6b8fa520e432ef..dddb32da73f237 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ HOSTED_DOCS_ONLY--> [![Version](https://img.shields.io/github/v/release/datahub-project/datahub?include_prereleases)](https://github.com/datahub-project/datahub/releases/latest) [![PyPI version](https://badge.fury.io/py/acryl-datahub.svg)](https://badge.fury.io/py/acryl-datahub) [![build & test](https://github.com/datahub-project/datahub/workflows/build%20&%20test/badge.svg?branch=master&event=push)](https://github.com/datahub-project/datahub/actions?query=workflow%3A%22build+%26+test%22+branch%3Amaster+event%3Apush) -[![Docker Pulls](https://img.shields.io/docker/pulls/linkedin/datahub-gms.svg)](https://hub.docker.com/r/linkedin/datahub-gms) +[![Docker Pulls](https://img.shields.io/docker/pulls/acryldata/datahub-gms.svg)](https://hub.docker.com/r/acryldata/datahub-gms) [![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.datahubproject.io) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/datahub-project/datahub/blob/master/docs/CONTRIBUTING.md) [![GitHub commit 
activity](https://img.shields.io/github/commit-activity/m/datahub-project/datahub)](https://github.com/datahub-project/datahub/pulls?q=is%3Apr) diff --git a/build.gradle b/build.gradle index f5e5403e822e77..5cf43755fceffe 100644 --- a/build.gradle +++ b/build.gradle @@ -1,48 +1,81 @@ buildscript { + ext.jdkVersionDefault = 17 + ext.javaClassVersionDefault = 11 + + ext.jdkVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any{ + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }}) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjdkVersionDefault + return p.hasProperty('jdkVersionDefault') ? Integer.valueOf((String) p.getProperty('jdkVersionDefault')) : ext.jdkVersionDefault + } + } + + ext.javaClassVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any { + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }}) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjavaClassVersionDefault + return p.hasProperty('javaClassVersionDefault') ? 
Integer.valueOf((String) p.getProperty('javaClassVersionDefault')) : ext.javaClassVersionDefault + } + } + ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.46.8' + ext.pegasusVersion = '29.51.6' ext.mavenVersion = '3.6.3' - ext.springVersion = '5.3.29' - ext.springBootVersion = '2.7.14' + ext.springVersion = '6.1.4' + ext.springBootVersion = '3.2.3' + ext.springKafkaVersion = '3.1.2' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '4.4.9' - ext.neo4jTestVersion = '4.4.25' - ext.neo4jApocVersion = '4.4.0.20:all' + ext.neo4jVersion = '5.14.0' + ext.neo4jTestVersion = '5.14.0' + ext.neo4jApocVersion = '5.14.0' ext.testContainersVersion = '1.17.4' - ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x - ext.jacksonVersion = '2.15.2' - ext.jettyVersion = '9.4.46.v20220331' - ext.playVersion = '2.8.18' + ext.elasticsearchVersion = '2.11.1' // ES 7.10, Opensearch 1.x, 2.x + ext.jacksonVersion = '2.15.3' + ext.jettyVersion = '11.0.19' + ext.playVersion = '2.8.21' ext.log4jVersion = '2.19.0' ext.slf4jVersion = '1.7.36' - ext.logbackClassic = '1.2.12' + ext.logbackClassic = '1.4.14' ext.hadoop3Version = '3.3.5' - ext.kafkaVersion = '2.3.0' + ext.kafkaVersion = '5.5.15' ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' ext.googleJavaFormatVersion = '1.18.1' + ext.openLineageVersion = '1.5.0' + ext.logbackClassicJava8 = '1.2.12' - ext.docker_registry = 'linkedin' + ext.docker_registry = 'acryldata' apply from: './repositories.gradle' buildscript.repositories.addAll(project.repositories) dependencies { classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion - classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4' + classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1' classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath 
"io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" classpath "org.gradle.playframework:gradle-playframework:0.14" - classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2" } } plugins { - id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' - id 'com.github.johnrengelman.shadow' version '6.1.0' + id 'com.gorylenko.gradle-git-properties' version '2.4.1' + id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false + id 'com.avast.gradle.docker-compose' version '0.17.6' id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" @@ -62,6 +95,7 @@ project.ext.spec = [ 'restliDocgen' : 'com.linkedin.pegasus:restli-docgen:' + pegasusVersion, 'restliServer' : 'com.linkedin.pegasus:restli-server:' + pegasusVersion, 'restliSpringBridge': 'com.linkedin.pegasus:restli-spring-bridge:' + pegasusVersion, + 'restliTestUtils' : 'com.linkedin.pegasus:restli-client-testutils:' + pegasusVersion, ] ] ] @@ -74,7 +108,7 @@ project.ext.externalDependency = [ 'avro': 'org.apache.avro:avro:1.11.3', 'avroCompiler': 'org.apache.avro:avro-compiler:1.11.3', 'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.17', - 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:1.1.9', + 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:2.0.3', 'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.13', 'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.2', 'awsRds':'software.amazon.awssdk:rds:2.18.24', @@ -97,11 +131,12 @@ project.ext.externalDependency = [ 'elasticSearchRest': 'org.opensearch.client:opensearch-rest-high-level-client:' + elasticsearchVersion, 
'elasticSearchJava': 'org.opensearch.client:opensearch-java:2.6.0', 'findbugsAnnotations': 'com.google.code.findbugs:annotations:3.0.1', - 'graphqlJava': 'com.graphql-java:graphql-java:19.5', - 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:19.1', + 'graphqlJava': 'com.graphql-java:graphql-java:21.3', + 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:21.0', 'gson': 'com.google.code.gson:gson:2.8.9', - 'guice': 'com.google.inject:guice:4.2.3', - 'guava': 'com.google.guava:guava:32.1.2-jre', + 'guice': 'com.google.inject:guice:7.0.0', + 'guicePlay': 'com.google.inject:guice:5.0.1', // Used for frontend while still on old Play version + 'guava': 'com.google.guava:guava:32.1.3-jre', 'h2': 'com.h2database:h2:2.2.224', 'hadoopCommon':'org.apache.hadoop:hadoop-common:2.7.2', 'hadoopMapreduceClient':'org.apache.hadoop:hadoop-mapreduce-client-core:2.7.2', @@ -111,7 +146,7 @@ project.ext.externalDependency = [ 'hazelcastSpring':"com.hazelcast:hazelcast-spring:$hazelcastVersion", 'hazelcastTest':"com.hazelcast:hazelcast:$hazelcastVersion:tests", 'hibernateCore': 'org.hibernate:hibernate-core:5.2.16.Final', - 'httpClient': 'org.apache.httpcomponents:httpclient:4.5.9', + 'httpClient': 'org.apache.httpcomponents.client5:httpclient5:5.3', 'httpAsyncClient': 'org.apache.httpcomponents:httpasyncclient:4.1.5', 'iStackCommons': 'com.sun.istack:istack-commons-runtime:4.0.1', 'jacksonJDK8': "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$jacksonVersion", @@ -123,7 +158,7 @@ project.ext.externalDependency = [ 'javatuples': 'org.javatuples:javatuples:1.2', 'javaxInject' : 'javax.inject:javax.inject:1', 'javaxValidation' : 'javax.validation:validation-api:2.0.1.Final', - 'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.25.1', + 'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.41', 'jerseyGuava': 'org.glassfish.jersey.bundles.repackaged:jersey-guava:2.25.1', 'jettyJaas': "org.eclipse.jetty:jetty-jaas:$jettyVersion", 
'jettyClient': "org.eclipse.jetty:jetty-client:$jettyVersion", @@ -139,29 +174,31 @@ project.ext.externalDependency = [ 'junitJupiterParams': "org.junit.jupiter:junit-jupiter-params:$junitJupiterVersion", 'junitJupiterEngine': "org.junit.jupiter:junit-jupiter-engine:$junitJupiterVersion", // avro-serde includes dependencies for `kafka-avro-serializer` `kafka-schema-registry-client` and `avro` - 'kafkaAvroSerde': 'io.confluent:kafka-streams-avro-serde:5.5.1', + 'kafkaAvroSerde': "io.confluent:kafka-streams-avro-serde:$kafkaVersion", 'kafkaAvroSerializer': 'io.confluent:kafka-avro-serializer:5.1.4', - 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion", + 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion-ccs", 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.4', 'logbackClassic': "ch.qos.logback:logback-classic:$logbackClassic", + 'logbackClassicJava8' : "ch.qos.logback:logback-classic:$logbackClassicJava8", 'slf4jApi': "org.slf4j:slf4j-api:$slf4jVersion", 'log4jCore': "org.apache.logging.log4j:log4j-core:$log4jVersion", 'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion", 'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion", 'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion", - 'lombok': 'org.projectlombok:lombok:1.18.16', + 'lombok': 'org.projectlombok:lombok:1.18.30', 'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0', 'mavenArtifact': "org.apache.maven:maven-artifact:$mavenVersion", 'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4', - 'mockito': 'org.mockito:mockito-core:3.0.0', - 'mockitoInline': 'org.mockito:mockito-inline:3.0.0', + 'mockito': 'org.mockito:mockito-core:4.11.0', + 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', - 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', + 'mysqlConnector': 'mysql:mysql-connector-java:8.0.28', 'neo4jHarness': 
'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, - 'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion, + 'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion, + 'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', @@ -180,22 +217,22 @@ project.ext.externalDependency = [ 'playFilters': "com.typesafe.play:filters-helpers_2.12:$playVersion", 'pac4j': 'org.pac4j:pac4j-oidc:4.5.7', 'playPac4j': 'org.pac4j:play-pac4j_2.12:9.0.2', - 'postgresql': 'org.postgresql:postgresql:42.3.8', + 'postgresql': 'org.postgresql:postgresql:42.3.9', 'protobuf': 'com.google.protobuf:protobuf-java:3.19.6', 'grpcProtobuf': 'io.grpc:grpc-protobuf:1.53.0', 'rangerCommons': 'org.apache.ranger:ranger-plugins-common:2.3.0', 'reflections': 'org.reflections:reflections:0.9.9', 'resilience4j': 'io.github.resilience4j:resilience4j-retry:1.7.1', 'rythmEngine': 'org.rythmengine:rythm-engine:1.3.0', - 'servletApi': 'javax.servlet:javax.servlet-api:3.1.0', - 'shiroCore': 'org.apache.shiro:shiro-core:1.11.0', + 'servletApi': 'jakarta.servlet:jakarta.servlet-api:6.0.0', + 'shiroCore': 'org.apache.shiro:shiro-core:1.13.0', 'snakeYaml': 'org.yaml:snakeyaml:2.0', - 'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8', - 'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8', + 'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3', + 'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3', 'springBeans': "org.springframework:spring-beans:$springVersion", 'springContext': "org.springframework:spring-context:$springVersion", 'springCore': 
"org.springframework:spring-core:$springVersion", - 'springDocUI': 'org.springdoc:springdoc-openapi-ui:1.6.14', + 'springDocUI': 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.3.0', 'springJdbc': "org.springframework:spring-jdbc:$springVersion", 'springWeb': "org.springframework:spring-web:$springVersion", 'springWebMVC': "org.springframework:spring-webmvc:$springVersion", @@ -206,11 +243,11 @@ project.ext.externalDependency = [ 'springBootStarterJetty': "org.springframework.boot:spring-boot-starter-jetty:$springBootVersion", 'springBootStarterCache': "org.springframework.boot:spring-boot-starter-cache:$springBootVersion", 'springBootStarterValidation': "org.springframework.boot:spring-boot-starter-validation:$springBootVersion", - 'springKafka': 'org.springframework.kafka:spring-kafka:2.9.13', + 'springKafka': "org.springframework.kafka:spring-kafka:$springKafkaVersion", 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', - 'testngJava8': 'org.testng:testng:7.5.1', + 'springBootAutoconfigureJdk11': 'org.springframework.boot:spring-boot-autoconfigure:2.7.18', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -226,13 +263,81 @@ project.ext.externalDependency = [ 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', - 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' + 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0', + 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2', + 'classGraph': 'io.github.classgraph:classgraph:4.8.165', ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' + + 
tasks.withType(Test).configureEach { task -> if (task.project.name != "metadata-io") { + // https://docs.gradle.org/current/userguide/performance.html + maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + + if (project.configurations.getByName("testImplementation").getDependencies() + .any { it.getName().contains("testng") }) { + useTestNG() + } + } + } + + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application'))) { + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + // Puts parameter names into compiled class files, necessary for Spring 6 + options.compilerArgs.add("-parameters") + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application'))) { + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + } + } } configure(subprojects.findAll 
{! it.name.startsWith('spark-lineage')}) { @@ -264,30 +369,71 @@ subprojects { failOnNoGitDirectory = false } - plugins.withType(JavaPlugin) { + plugins.withType(JavaPlugin).configureEach { + if (project.name == 'datahub-web-react') { + return + } + dependencies { + implementation externalDependency.annotationApi constraints { implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') - implementation('org.apache.commons:commons-compress:1.21') + implementation('org.apache.commons:commons-compress:1.26.0') implementation('org.apache.velocity:velocity-engine-core:2.3') implementation('org.hibernate:hibernate-validator:6.0.20.Final') implementation("com.fasterxml.jackson.core:jackson-databind:$jacksonVersion") implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion") } } + spotless { java { googleJavaFormat() target project.fileTree(project.projectDir) { - include '**/*.java' - exclude 'build/**/*.java' - exclude '**/generated/**/*.*' - exclude '**/mainGeneratedDataTemplate/**/*.*' - exclude '**/mainGeneratedRest/**/*.*' + include 'src/**/*.java' + exclude 'src/**/resources/' + exclude 'src/**/generated/' + exclude 'src/**/mainGeneratedDataTemplate/' + exclude 'src/**/mainGeneratedRest/' + exclude 'src/renamed/avro/' + exclude 'src/test/sample-test-plugins/' + } + } + } + + if (project.plugins.hasPlugin('pegasus')) { + dependencies { + dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 + restClientCompile spec.product.pegasus.restliClient + } + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + // Puts parameter names into 
compiled class files, necessary for Spring 6 + options.compilerArgs.add("-parameters") + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) } } } + afterEvaluate { def spotlessJavaTask = tasks.findByName('spotlessJava') def processTask = tasks.findByName('processResources') @@ -305,30 +451,23 @@ subprojects { } } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(11) - } - } - tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) - } - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() - } - } - afterEvaluate { if (project.plugins.hasPlugin('pegasus')) { dependencies { dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1f9d30d520171b..88900e06d48451 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,9 +1,11 @@ -apply plugin: 'java' - buildscript { apply from: '../repositories.gradle' } +plugins { + id 'java' +} + dependencies { /** * Forked version of abandoned repository: https://github.com/fge/json-schema-avro @@ -21,6 +23,6 @@ dependencies { implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' implementation 
'commons-io:commons-io:2.11.0' - compileOnly 'org.projectlombok:lombok:1.18.14' - annotationProcessor 'org.projectlombok:lombok:1.18.14' + compileOnly 'org.projectlombok:lombok:1.18.30' + annotationProcessor 'org.projectlombok:lombok:1.18.30' } \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config b/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config new file mode 120000 index 00000000000000..087629f8ac1df2 --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config @@ -0,0 +1 @@ +../../../../../../../../../entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config \ No newline at end of file diff --git a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java index 25bf239ab835b7..1c9dfd46866102 100644 --- a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java +++ b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java @@ -183,6 +183,7 @@ private void generateSchema(final File file) { final String fileBaseName; try { final JsonNode schema = JsonLoader.fromFile(file); + final JsonNode result = buildResult(schema.toString()); String prettySchema = JacksonUtils.prettyPrint(result); Path absolutePath = file.getAbsoluteFile().toPath(); @@ -195,11 +196,21 @@ private void generateSchema(final File file) { } else { fileBaseName = getBaseName(file.getName()); } - Files.write(Paths.get(jsonDirectory + sep + fileBaseName + ".json"), + + final String targetName; + if (schema.has("Aspect") && schema.get("Aspect").has("name") && + !schema.get("Aspect").get("name").asText().equalsIgnoreCase(fileBaseName)) { + targetName = OpenApiEntities.toUpperFirst(schema.get("Aspect").get("name").asText()); + prettySchema = prettySchema.replaceAll(fileBaseName, targetName); + } else { + targetName = fileBaseName; + } + + Files.write(Paths.get(jsonDirectory + sep + 
targetName + ".json"), prettySchema.getBytes(StandardCharsets.UTF_8), StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); if (schema.has("Aspect")) { - aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + getBaseName(file.getName()))); + aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + targetName)); } } catch (IOException | ProcessingException e) { throw new RuntimeException(e); diff --git a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java index 888c4a0e999311..e97acb0b43c81d 100644 --- a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java +++ b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; @@ -58,8 +59,12 @@ public class OpenApiEntities { .add("notebookInfo").add("editableNotebookProperties") .add("dataProductProperties") .add("institutionalMemory") + .add("forms").add("formInfo").add("dynamicFormAssignment") .build(); + private final static ImmutableSet ENTITY_EXCLUSIONS = ImmutableSet.builder() + .add("structuredProperty") + .build(); public OpenApiEntities(JsonNodeFactory NODE_FACTORY) { this.NODE_FACTORY = NODE_FACTORY; @@ -117,14 +122,27 @@ public ObjectNode entityExtension(List nodesList, ObjectNode schemas return componentsNode; } - private static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + /** + * Convert the pdl model names to desired class names. Upper case first letter unless the 3rd character is upper case. + * i.e. 
mlModel -> MLModel + * dataset -> Dataset + * dataProduct -> DataProduct + * @param s input string + * @return class name + */ + public static String toUpperFirst(String s) { + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toUpperCase() + s.substring(2); + } else { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } private Set withEntitySchema(ObjectNode schemasNode, Set definitions) { return entityMap.values().stream() // Make sure the primary key is defined .filter(entity -> definitions.contains(toUpperFirst(entity.getKeyAspect()))) + .filter(entity -> !ENTITY_EXCLUSIONS.contains(entity.getName())) .map(entity -> { final String upperName = toUpperFirst(entity.getName()); @@ -442,6 +460,22 @@ private ObjectNode buildListEntityPath(Entity entity, Set parameterDefin ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", "Create " + upperFirst) .put("operationId", String.format("create", upperFirst)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity ONLY if it does not already exist. 
Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", "Create " + entity.getName() + " entities.") .put("required", true) @@ -547,7 +581,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { ObjectNode getMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Get %s for %s.", aspect, entity.getName())) - .put("operationId", String.format("get%s", upperFirstAspect, upperFirstEntity)); + .put("operationId", String.format("get%s", upperFirstAspect)); getMethod.set("tags", tagsNode); ArrayNode singlePathParametersNode = NODE_FACTORY.arrayNode(); getMethod.set("parameters", singlePathParametersNode); @@ -575,13 +609,13 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { .set("application/json", NODE_FACTORY.objectNode()))); ObjectNode headMethod = NODE_FACTORY.objectNode() .put("summary", String.format("%s on %s existence.", aspect, upperFirstEntity)) - .put("operationId", String.format("head%s", upperFirstAspect, upperFirstEntity)) + .put("operationId", String.format("head%s", upperFirstAspect)) .set("responses", headResponses); headMethod.set("tags", tagsNode); ObjectNode deleteMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Delete %s on entity %s", aspect, upperFirstEntity)) - .put("operationId", String.format("delete%s", upperFirstAspect, upperFirstEntity)) + .put("operationId", String.format("delete%s", upperFirstAspect)) .set("responses", NODE_FACTORY.objectNode() .set("200", NODE_FACTORY.objectNode() .put("description", String.format("Delete %s on %s entity.", aspect, upperFirstEntity)) @@ -591,7 +625,23 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity)) - 
.put("operationId", String.format("create%s", upperFirstAspect, upperFirstEntity)); + .put("operationId", String.format("create%s", upperFirstAspect)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity if it does not already exist. Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity)) .put("required", true).set("content", NODE_FACTORY.objectNode() diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index ef33bde8f61d39..5466c5f127635c 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,20 +1,18 @@ package auth; import static auth.AuthUtils.*; -import static auth.sso.oidc.OidcConfigs.*; import static utils.ConfigUtil.*; -import auth.sso.SsoConfigs; import auth.sso.SsoManager; -import auth.sso.oidc.OidcConfigs; -import auth.sso.oidc.OidcProvider; import client.AuthServiceClient; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.google.inject.Singleton; +import com.google.inject.name.Named; import 
com.linkedin.entity.client.SystemEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.metadata.restli.DefaultRestliClientFactory; @@ -23,14 +21,18 @@ import config.ConfigurationProvider; import controllers.SsoCallbackController; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; + +import io.datahubproject.metadata.context.ActorContext; +import io.datahubproject.metadata.context.AuthorizerContext; +import io.datahubproject.metadata.context.EntityRegistryContext; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.SearchContext; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.pac4j.core.client.Client; -import org.pac4j.core.client.Clients; import org.pac4j.core.config.Config; import org.pac4j.core.context.session.SessionStore; import org.pac4j.play.LogoutController; @@ -45,6 +47,7 @@ import utils.ConfigUtil; /** Responsible for configuring, validating, and providing authentication related components. 
*/ +@Slf4j public class AuthModule extends AbstractModule { /** @@ -58,6 +61,7 @@ public class AuthModule extends AbstractModule { private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + private static final String GET_SSO_SETTINGS_ENDPOINT = "auth/getSsoSettings"; private final com.typesafe.config.Config _configs; @@ -111,6 +115,7 @@ protected void configure() { Authentication.class, SystemEntityClient.class, AuthServiceClient.class, + org.pac4j.core.config.Config.class, com.typesafe.config.Config.class)); } catch (NoSuchMethodException | SecurityException e) { throw new RuntimeException( @@ -124,34 +129,20 @@ protected void configure() { @Provides @Singleton - protected Config provideConfig(SsoManager ssoManager) { - if (ssoManager.isSsoEnabled()) { - final Clients clients = new Clients(); - final List clientList = new ArrayList<>(); - clientList.add(ssoManager.getSsoProvider().client()); - clients.setClients(clientList); - final Config config = new Config(clients); - config.setHttpActionAdapter(new PlayHttpActionAdapter()); - return config; - } - return new Config(); + protected Config provideConfig() { + Config config = new Config(); + config.setHttpActionAdapter(new PlayHttpActionAdapter()); + return config; } @Provides @Singleton - protected SsoManager provideSsoManager() { - SsoManager manager = new SsoManager(); - // Seed the SSO manager with a default SSO provider. - if (isSsoEnabled(_configs)) { - SsoConfigs ssoConfigs = new SsoConfigs(_configs); - if (ssoConfigs.isOidcEnabled()) { - // Register OIDC Provider, add to list of managers. - OidcConfigs oidcConfigs = new OidcConfigs(_configs); - OidcProvider oidcProvider = new OidcProvider(oidcConfigs); - // Set the default SSO provider to this OIDC client. 
- manager.setSsoProvider(oidcProvider); - } - } + protected SsoManager provideSsoManager( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + SsoManager manager = + new SsoManager( + _configs, systemAuthentication, getSsoSettingsRequestUrl(_configs), httpClient); + manager.initializeSsoProvider(); return manager; } @@ -170,6 +161,31 @@ protected Authentication provideSystemAuthentication() { Collections.emptyMap()); } + @Provides + @Singleton + @Named("systemOperationContext") + protected OperationContext provideOperationContext(final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + ActorContext systemActorContext = + ActorContext.builder() + .systemAuth(true) + .authentication(systemAuthentication) + .build(); + OperationContextConfig systemConfig = OperationContextConfig.builder() + .viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView()) + .allowSystemAuthentication(true) + .build(); + + return OperationContext.builder() + .operationContextConfig(systemConfig) + .systemActorContext(systemActorContext) + .searchContext(SearchContext.EMPTY) + .entityRegistryContext(EntityRegistryContext.EMPTY) + // Authorizer.EMPTY doesn't actually apply to system auth + .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build()) + .build(systemAuthentication); + } + @Provides @Singleton protected ConfigurationProvider provideConfigurationProvider() { @@ -181,43 +197,26 @@ protected ConfigurationProvider provideConfigurationProvider() { @Provides @Singleton protected SystemEntityClient provideEntityClient( - final Authentication systemAuthentication, + @Named("systemOperationContext") final OperationContext systemOperationContext, final ConfigurationProvider configurationProvider) { return new SystemRestliEntityClient( + systemOperationContext, buildRestliClient(), new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), 
_configs.getInt(ENTITY_CLIENT_NUM_RETRIES), - systemAuthentication, configurationProvider.getCache().getClient().getEntityClient()); } - @Provides - @Singleton - protected CloseableHttpClient provideHttpClient() { - return HttpClients.createDefault(); - } - @Provides @Singleton protected AuthServiceClient provideAuthClient( Authentication systemAuthentication, CloseableHttpClient httpClient) { // Init a GMS auth client - final String metadataServiceHost = - _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) - ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + final String metadataServiceHost = getMetadataServiceHost(_configs); - final int metadataServicePort = - _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) - ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt( - Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); + final int metadataServicePort = getMetadataServicePort(_configs); - final Boolean metadataServiceUseSsl = - _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) - ? 
_configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean( - Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + final boolean metadataServiceUseSsl = doesMetadataServiceUseSsl(_configs); return new AuthServiceClient( metadataServiceHost, @@ -227,6 +226,12 @@ protected AuthServiceClient provideAuthClient( httpClient); } + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } + private com.linkedin.restli.client.Client buildRestliClient() { final String metadataServiceHost = utils.ConfigUtil.getString( @@ -255,16 +260,33 @@ private com.linkedin.restli.client.Client buildRestliClient() { metadataServiceSslProtocol); } - protected boolean isSsoEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + protected boolean doesMetadataServiceUseSsl(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + ? configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean( + Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + } + + protected String getMetadataServiceHost(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) + ? configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); } - protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. 
- return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH))); + protected Integer getMetadataServicePort(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) + ? configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt( + Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); + } + + protected String getSsoSettingsRequestUrl(com.typesafe.config.Config configs) { + final String protocol = doesMetadataServiceUseSsl(configs) ? "https" : "http"; + final String metadataServiceHost = getMetadataServiceHost(configs); + final Integer metadataServicePort = getMetadataServicePort(configs); + + return String.format( + "%s://%s:%s/%s", + protocol, metadataServiceHost, metadataServicePort, GET_SSO_SETTINGS_ENDPOINT); } } diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 283a2164584b95..51bb784c61b3b1 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -56,6 +56,29 @@ public class AuthUtils { public static final String TITLE = "title"; public static final String INVITE_TOKEN = "inviteToken"; public static final String RESET_TOKEN = "resetToken"; + public static final String BASE_URL = "baseUrl"; + public static final String OIDC_ENABLED = "oidcEnabled"; + public static final String CLIENT_ID = "clientId"; + public static final String CLIENT_SECRET = "clientSecret"; + public static final String DISCOVERY_URI = "discoveryUri"; + + public static final String USER_NAME_CLAIM = "userNameClaim"; + public static final String USER_NAME_CLAIM_REGEX = "userNameClaimRegex"; + public static final String SCOPE = "scope"; + public static final String CLIENT_NAME = "clientName"; + public static final String CLIENT_AUTHENTICATION_METHOD = "clientAuthenticationMethod"; + public static final String 
JIT_PROVISIONING_ENABLED = "jitProvisioningEnabled"; + public static final String PRE_PROVISIONING_REQUIRED = "preProvisioningRequired"; + public static final String EXTRACT_GROUPS_ENABLED = "extractGroupsEnabled"; + public static final String GROUPS_CLAIM = "groupsClaim"; + public static final String RESPONSE_TYPE = "responseType"; + public static final String RESPONSE_MODE = "responseMode"; + public static final String USE_NONCE = "useNonce"; + public static final String READ_TIMEOUT = "readTimeout"; + public static final String EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "extractJwtAccessTokenClaims"; + // Retained for backwards compatibility + public static final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm"; + public static final String PREFERRED_JWS_ALGORITHM_2 = "preferredJwsAlgorithm2"; /** * Determines whether the inbound request should be forward to downstream Metadata Service. Today, diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java index 1f8455e773ffb1..976d0826f22770 100644 --- a/datahub-frontend/app/auth/sso/SsoConfigs.java +++ b/datahub-frontend/app/auth/sso/SsoConfigs.java @@ -1,8 +1,16 @@ package auth.sso; -import static auth.ConfigUtil.*; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; -/** Class responsible for extracting and validating top-level SSO related configurations. */ +import static auth.AuthUtils.*; + + +/** + * Class responsible for extracting and validating top-level SSO related configurations. TODO: + * Refactor SsoConfigs to have OidcConfigs and other identity provider specific configs as instance + * variables. SSoManager should ideally not know about identity provider specific configs. 
+ */ public class SsoConfigs { /** Required configs */ @@ -22,16 +30,11 @@ public class SsoConfigs { private final String _authSuccessRedirectPath; private final Boolean _oidcEnabled; - public SsoConfigs(final com.typesafe.config.Config configs) { - _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH); - _authBaseCallbackPath = - getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH); - _authSuccessRedirectPath = - getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH); - _oidcEnabled = - configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + public SsoConfigs(Builder builder) { + _authBaseUrl = builder._authBaseUrl; + _authBaseCallbackPath = builder._authBaseCallbackPath; + _authSuccessRedirectPath = builder._authSuccessRedirectPath; + _oidcEnabled = builder._oidcEnabled; } public String getAuthBaseUrl() { @@ -49,4 +52,52 @@ public String getAuthSuccessRedirectPath() { public Boolean isOidcEnabled() { return _oidcEnabled; } + + public static class Builder> { + protected String _authBaseUrl = null; + private String _authBaseCallbackPath = DEFAULT_BASE_CALLBACK_PATH; + private String _authSuccessRedirectPath = DEFAULT_SUCCESS_REDIRECT_PATH; + protected Boolean _oidcEnabled = false; + private final ObjectMapper _objectMapper = new ObjectMapper(); + protected JsonNode jsonNode = null; + + // No need to check if changes are made since this method is only called at start-up. 
+ public Builder from(final com.typesafe.config.Config configs) { + if (configs.hasPath(AUTH_BASE_URL_CONFIG_PATH)) { + _authBaseUrl = configs.getString(AUTH_BASE_URL_CONFIG_PATH); + } + if (configs.hasPath(AUTH_BASE_CALLBACK_PATH_CONFIG_PATH)) { + _authBaseCallbackPath = configs.getString(AUTH_BASE_CALLBACK_PATH_CONFIG_PATH); + } + if (configs.hasPath(OIDC_ENABLED_CONFIG_PATH)) { + _oidcEnabled = + Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + } + if (configs.hasPath(AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH)) { + _authSuccessRedirectPath = configs.getString(AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH); + } + return this; + } + + public Builder from(String ssoSettingsJsonStr) { + try { + jsonNode = _objectMapper.readTree(ssoSettingsJsonStr); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to parse ssoSettingsJsonStr %s into JSON", ssoSettingsJsonStr)); + } + if (jsonNode.has(BASE_URL)) { + _authBaseUrl = jsonNode.get(BASE_URL).asText(); + } + if (jsonNode.has(OIDC_ENABLED)) { + _oidcEnabled = jsonNode.get(OIDC_ENABLED).asBoolean(); + } + + return this; + } + + public SsoConfigs build() { + return new SsoConfigs(this); + } + } } diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java index bf33f4148a5531..8377eb40e237f7 100644 --- a/datahub-frontend/app/auth/sso/SsoManager.java +++ b/datahub-frontend/app/auth/sso/SsoManager.java @@ -1,13 +1,49 @@ package auth.sso; +import auth.sso.oidc.OidcConfigs; +import auth.sso.oidc.OidcProvider; +import com.datahub.authentication.Authentication; +import java.util.Objects; +import java.util.Optional; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; +import org.apache.http.HttpEntity; +import org.apache.http.HttpStatus; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import 
org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import play.mvc.Http; -/** Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. */ +/** + * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. + * TODO: Refactor SsoManager to only accept SsoConfigs when initialized. See SsoConfigs TODO as + * well. + */ +@Slf4j public class SsoManager { private SsoProvider _provider; // Only one active provider at a time. + private final Authentication + _authentication; // Authentication used to fetch SSO settings from GMS + private final String _ssoSettingsRequestUrl; // SSO settings request URL. + private final CloseableHttpClient _httpClient; // HTTP client for making requests to GMS. + private com.typesafe.config.Config _configs; - public SsoManager() {} + public SsoManager( + com.typesafe.config.Config configs, + Authentication authentication, + String ssoSettingsRequestUrl, + CloseableHttpClient httpClient) { + _configs = configs; + _authentication = Objects.requireNonNull(authentication, "authentication cannot be null"); + _ssoSettingsRequestUrl = + Objects.requireNonNull(ssoSettingsRequestUrl, "ssoSettingsRequestUrl cannot be null"); + _httpClient = Objects.requireNonNull(httpClient, "httpClient cannot be null"); + _provider = null; + } /** * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the @@ -16,6 +52,7 @@ public SsoManager() {} * @return true if SSO logic is enabled, false otherwise. */ public boolean isSsoEnabled() { + refreshSsoProvider(); return _provider != null; } @@ -24,17 +61,138 @@ public boolean isSsoEnabled() { * * @param provider the new {@link SsoProvider} to be used during authentication. 
*/ - public void setSsoProvider(@Nonnull final SsoProvider provider) { + public void setSsoProvider(final SsoProvider provider) { _provider = provider; } + public void setConfigs(final com.typesafe.config.Config configs) { + _configs = configs; + } + + public void clearSsoProvider() { + _provider = null; + } + /** * Gets the active {@link SsoProvider} instance. * * @return the {@SsoProvider} that should be used during authentication and on IdP callback, or * null if SSO is not enabled. */ + @Nullable public SsoProvider getSsoProvider() { return _provider; } + + public void initializeSsoProvider() { + SsoConfigs ssoConfigs = null; + try { + ssoConfigs = new SsoConfigs.Builder().from(_configs).build(); + } catch (Exception e) { + // Debug-level logging since this is expected to fail if SSO has not been configured. + log.debug(String.format("Missing SSO settings in static configs %s", _configs), e); + } + + if (ssoConfigs != null && ssoConfigs.isOidcEnabled()) { + try { + OidcConfigs oidcConfigs = new OidcConfigs.Builder().from(_configs).build(); + maybeUpdateOidcProvider(oidcConfigs); + } catch (Exception e) { + // Error-level logging since this is unexpected to fail if SSO has been configured. + log.error(String.format("Error building OidcConfigs from static configs %s", _configs), e); + } + } else { + // Clear the SSO Provider since no SSO is enabled. + clearSsoProvider(); + } + + refreshSsoProvider(); + } + + private void refreshSsoProvider() { + final Optional maybeSsoSettingsJsonStr = getDynamicSsoSettings(); + if (maybeSsoSettingsJsonStr.isEmpty()) { + return; + } + + // If we receive a non-empty response, try to update the SSO provider. 
+ final String ssoSettingsJsonStr = maybeSsoSettingsJsonStr.get(); + SsoConfigs ssoConfigs; + try { + ssoConfigs = new SsoConfigs.Builder().from(ssoSettingsJsonStr).build(); + } catch (Exception e) { + log.error( + String.format( + "Error building SsoConfigs from invalid json %s, reusing previous settings", + ssoSettingsJsonStr), + e); + return; + } + + if (ssoConfigs != null && ssoConfigs.isOidcEnabled()) { + try { + OidcConfigs oidcConfigs = + new OidcConfigs.Builder().from(_configs, ssoSettingsJsonStr).build(); + maybeUpdateOidcProvider(oidcConfigs); + } catch (Exception e) { + log.error( + String.format( + "Error building OidcConfigs from invalid json %s, reusing previous settings", + ssoSettingsJsonStr), + e); + } + } else { + // Clear the SSO Provider since no SSO is enabled. + clearSsoProvider(); + } + } + + private void maybeUpdateOidcProvider(OidcConfigs oidcConfigs) { + SsoProvider existingSsoProvider = getSsoProvider(); + if (existingSsoProvider instanceof OidcProvider) { + OidcProvider existingOidcProvider = (OidcProvider) existingSsoProvider; + // If the existing provider is an OIDC provider and the configs are the same, do nothing. + if (existingOidcProvider.configs().equals(oidcConfigs)) { + return; + } + } + + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + setSsoProvider(oidcProvider); + } + + /** Call the Auth Service to get SSO settings */ + @Nonnull + private Optional getDynamicSsoSettings() { + CloseableHttpResponse response = null; + try { + final HttpPost request = new HttpPost(_ssoSettingsRequestUrl); + + // Build JSON request to verify credentials for a native user. + request.setEntity(new StringEntity("")); + + // Add authorization header with DataHub frontend system id and secret. 
+ request.addHeader(Http.HeaderNames.AUTHORIZATION, _authentication.getCredentials()); + + response = _httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { + // Successfully received the SSO settings + return Optional.of(EntityUtils.toString(entity)); + } else { + log.debug("No SSO settings received from Auth Service, reusing previous settings"); + } + } catch (Exception e) { + log.warn("Failed to get SSO settings due to exception, reusing previous settings", e); + } finally { + try { + if (response != null) { + response.close(); + } + } catch (Exception e) { + log.warn("Failed to close http response", e); + } + } + return Optional.empty(); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java index fa562f54312eca..c72c3537084834 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java @@ -10,6 +10,8 @@ import auth.sso.SsoManager; import client.AuthServiceClient; import com.datahub.authentication.Authentication; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.common.AuditStamp; import com.linkedin.common.CorpGroupUrnArray; import com.linkedin.common.CorpuserUrnArray; @@ -300,6 +302,29 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { return corpUserSnapshot; } + public static Collection getGroupNames(CommonProfile profile, Object groupAttribute, String groupsClaimName) { + Collection groupNames = Collections.emptyList(); + try { + if (groupAttribute instanceof Collection) { + // List of group names + groupNames = (Collection) profile.getAttribute(groupsClaimName, Collection.class); + } else if (groupAttribute instanceof String) { + String groupString = (String) groupAttribute; + 
ObjectMapper objectMapper = new ObjectMapper(); + try { + // Json list of group names + groupNames = objectMapper.readValue(groupString, new TypeReference>(){}); + } catch (Exception e) { + groupNames = Arrays.asList(groupString.split(",")); + } + } + } catch (Exception e) { + log.error(String.format( + "Failed to parse group names: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + } + return groupNames; + } private List extractGroups(CommonProfile profile) { log.debug( @@ -320,23 +345,7 @@ private List extractGroups(CommonProfile profile) { if (profile.containsAttribute(groupsClaimName)) { try { final List groupSnapshots = new ArrayList<>(); - final Collection groupNames; - final Object groupAttribute = profile.getAttribute(groupsClaimName); - if (groupAttribute instanceof Collection) { - // List of group names - groupNames = - (Collection) profile.getAttribute(groupsClaimName, Collection.class); - } else if (groupAttribute instanceof String) { - // Single group name - groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class)); - } else { - log.error( - String.format( - "Fail to parse OIDC group claim with name %s. Unknown type %s provided.", - groupsClaimName, groupAttribute.getClass())); - // Skip over group attribute. Do not throw. - groupNames = Collections.emptyList(); - } + Collection groupNames = getGroupNames(profile, profile.getAttribute(groupsClaimName), groupsClaimName); for (String groupName : groupNames) { // Create a basic CorpGroupSnapshot from the information. 
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index 6877ca187da973..5de4eba9cb6798 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,8 +1,10 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; import static auth.ConfigUtil.*; import auth.sso.SsoConfigs; +import java.util.Objects; import java.util.Optional; import lombok.Getter; @@ -44,79 +46,204 @@ public class OidcConfigs extends SsoConfigs { private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = - "openid profile email"; // Often "group" must be included for groups. + private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; + // Often "group" must be included for groups. private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = - "false"; // False since extraction of groups can overwrite existing group membership. + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; + // False since extraction of groups can overwrite existing group membership. 
private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; - private String clientId; - private String clientSecret; - private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional responseType; - private Optional responseMode; - private Optional useNonce; - private Optional customParamResource; - private String readTimeout; - private Optional extractJwtAccessTokenClaims; + private final String clientId; + private final String clientSecret; + private final String discoveryUri; + private final String userNameClaim; + private final String userNameClaimRegex; + private final String scope; + private final String clientName; + private final String clientAuthenticationMethod; + private final boolean jitProvisioningEnabled; + private final boolean preProvisioningRequired; + private final boolean extractGroupsEnabled; + private final String groupsClaimName; + private final Optional responseType; + private final Optional responseMode; + private final Optional useNonce; + private final Optional customParamResource; + private final String readTimeout; + private final Optional extractJwtAccessTokenClaims; private Optional preferredJwsAlgorithm; - public OidcConfigs(final com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = - getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional( - configs, 
OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = - getOptional( - configs, - OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, - DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); - jitProvisioningEnabled = - Boolean.parseBoolean( - getOptional( - configs, - OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, - DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); - preProvisioningRequired = - Boolean.parseBoolean( - getOptional( - configs, - OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, - DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); - extractGroupsEnabled = - Boolean.parseBoolean( - getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); - groupsClaimName = - getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); - responseType = getOptional(configs, OIDC_RESPONSE_TYPE); - responseMode = getOptional(configs, OIDC_RESPONSE_MODE); - useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); - customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); - readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); - extractJwtAccessTokenClaims = - getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); - preferredJwsAlgorithm = - Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + public OidcConfigs(Builder builder) { + super(builder); + this.clientId = builder.clientId; + this.clientSecret = builder.clientSecret; + this.discoveryUri = builder.discoveryUri; + this.userNameClaim = builder.userNameClaim; + this.userNameClaimRegex = builder.userNameClaimRegex; + this.scope = builder.scope; + this.clientName = builder.clientName; + this.clientAuthenticationMethod = builder.clientAuthenticationMethod; + 
this.jitProvisioningEnabled = builder.jitProvisioningEnabled; + this.preProvisioningRequired = builder.preProvisioningRequired; + this.extractGroupsEnabled = builder.extractGroupsEnabled; + this.groupsClaimName = builder.groupsClaimName; + this.responseType = builder.responseType; + this.responseMode = builder.responseMode; + this.useNonce = builder.useNonce; + this.customParamResource = builder.customParamResource; + this.readTimeout = builder.readTimeout; + this.extractJwtAccessTokenClaims = builder.extractJwtAccessTokenClaims; + this.preferredJwsAlgorithm = builder.preferredJwsAlgorithm; + } + + public static class Builder extends SsoConfigs.Builder { + private String clientId; + private String clientSecret; + private String discoveryUri; + private String userNameClaim = DEFAULT_OIDC_USERNAME_CLAIM; + private String userNameClaimRegex = DEFAULT_OIDC_USERNAME_CLAIM_REGEX; + private String scope = DEFAULT_OIDC_SCOPE; + private String clientName = DEFAULT_OIDC_CLIENT_NAME; + private String clientAuthenticationMethod = DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD; + private boolean jitProvisioningEnabled = + Boolean.parseBoolean(DEFAULT_OIDC_JIT_PROVISIONING_ENABLED); + private boolean preProvisioningRequired = + Boolean.parseBoolean(DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED); + private boolean extractGroupsEnabled = + Boolean.parseBoolean(DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED); + private String groupsClaimName = DEFAULT_OIDC_GROUPS_CLAIM; + private Optional responseType = Optional.empty(); + private Optional responseMode = Optional.empty(); + private Optional useNonce = Optional.empty(); + private Optional customParamResource = Optional.empty(); + private String readTimeout = DEFAULT_OIDC_READ_TIMEOUT; + private Optional extractJwtAccessTokenClaims = Optional.empty(); + private Optional preferredJwsAlgorithm = Optional.empty(); + + public Builder from(final com.typesafe.config.Config configs) { + super.from(configs); + clientId = getRequired(configs, 
OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + getOptional( + configs, + OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); + jitProvisioningEnabled = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, + DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); + preProvisioningRequired = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, + DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); + extractGroupsEnabled = + Boolean.parseBoolean( + getOptional( + configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); + groupsClaimName = + getOptional( + configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); + responseType = getOptional(configs, OIDC_RESPONSE_TYPE); + responseMode = getOptional(configs, OIDC_RESPONSE_MODE); + useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); + customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); + readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); + extractJwtAccessTokenClaims = + getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + return this; + } + + public Builder from(final com.typesafe.config.Config configs, final String 
ssoSettingsJsonStr) { + super.from(ssoSettingsJsonStr); + if (jsonNode.has(CLIENT_ID)) { + clientId = jsonNode.get(CLIENT_ID).asText(); + } + if (jsonNode.has(CLIENT_SECRET)) { + clientSecret = jsonNode.get(CLIENT_SECRET).asText(); + } + if (jsonNode.has(DISCOVERY_URI)) { + discoveryUri = jsonNode.get(DISCOVERY_URI).asText(); + } + if (jsonNode.has(USER_NAME_CLAIM)) { + userNameClaim = jsonNode.get(USER_NAME_CLAIM).asText(); + } + if (jsonNode.has(USER_NAME_CLAIM_REGEX)) { + userNameClaimRegex = jsonNode.get(USER_NAME_CLAIM_REGEX).asText(); + } + if (jsonNode.has(SCOPE)) { + scope = jsonNode.get(SCOPE).asText(); + } + if (jsonNode.has(CLIENT_NAME)) { + clientName = jsonNode.get(CLIENT_NAME).asText(); + } + if (jsonNode.has(CLIENT_AUTHENTICATION_METHOD)) { + clientAuthenticationMethod = jsonNode.get(CLIENT_AUTHENTICATION_METHOD).asText(); + } + if (jsonNode.has(JIT_PROVISIONING_ENABLED)) { + jitProvisioningEnabled = jsonNode.get(JIT_PROVISIONING_ENABLED).asBoolean(); + } + if (jsonNode.has(PRE_PROVISIONING_REQUIRED)) { + preProvisioningRequired = jsonNode.get(PRE_PROVISIONING_REQUIRED).asBoolean(); + } + if (jsonNode.has(EXTRACT_GROUPS_ENABLED)) { + extractGroupsEnabled = jsonNode.get(EXTRACT_GROUPS_ENABLED).asBoolean(); + } + if (jsonNode.has(GROUPS_CLAIM)) { + groupsClaimName = jsonNode.get(GROUPS_CLAIM).asText(); + } + if (jsonNode.has(RESPONSE_TYPE)) { + responseType = Optional.of(jsonNode.get(RESPONSE_TYPE).asText()); + } + if (jsonNode.has(RESPONSE_MODE)) { + responseMode = Optional.of(jsonNode.get(RESPONSE_MODE).asText()); + } + if (jsonNode.has(USE_NONCE)) { + useNonce = Optional.of(jsonNode.get(USE_NONCE).asBoolean()); + } + if (jsonNode.has(READ_TIMEOUT)) { + readTimeout = jsonNode.get(READ_TIMEOUT).asText(); + } + if (jsonNode.has(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS)) { + extractJwtAccessTokenClaims = + Optional.of(jsonNode.get(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).asBoolean()); + } + if (jsonNode.has(PREFERRED_JWS_ALGORITHM_2)) { + preferredJwsAlgorithm = 
Optional.of(jsonNode.get(PREFERRED_JWS_ALGORITHM_2).asText()); + } else { + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + } + + return this; + } + + public OidcConfigs build() { + Objects.requireNonNull(_oidcEnabled, "oidcEnabled is required"); + Objects.requireNonNull(clientId, "clientId is required"); + Objects.requireNonNull(clientSecret, "clientSecret is required"); + Objects.requireNonNull(discoveryUri, "discoveryUri is required"); + Objects.requireNonNull(_authBaseUrl, "authBaseUrl is required"); + + return new OidcConfigs(this); + } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 4d40f45cd09b48..baa992994d8ba6 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,6 +3,7 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.inject.Inject; import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -47,6 +48,7 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; + @Inject public AuthServiceClient( @Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 3d87267f8ebe38..9f548b104e8fe0 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -1,5 +1,7 @@ package config; +import com.datahub.authorization.AuthorizationConfiguration; +import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.cache.CacheConfiguration; import 
com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; @@ -22,4 +24,10 @@ public class ConfigurationProvider { /** Configuration for caching */ private CacheConfiguration cache; + + /** Configuration for the view layer */ + private VisualConfiguration visualConfig; + + /** Configuration for authorization */ + private AuthorizationConfiguration authorization; } diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java index 60971bf06e27bd..df0cd4f4ff82fc 100644 --- a/datahub-frontend/app/controllers/Application.java +++ b/datahub-frontend/app/controllers/Application.java @@ -13,6 +13,7 @@ import com.linkedin.util.Pair; import com.typesafe.config.Config; import java.io.InputStream; +import java.net.URI; import java.time.Duration; import java.util.List; import java.util.Map; @@ -125,6 +126,12 @@ public CompletableFuture proxy(String path, Http.Request request) headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } + if (!headers.containsKey(Http.HeaderNames.X_FORWARDED_PROTO)) { + final String schema = + Optional.ofNullable(URI.create(request.uri()).getScheme()).orElse("http"); + headers.put(Http.HeaderNames.X_FORWARDED_PROTO, List.of(schema)); + } + return _ws.url( String.format( "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index 9c232e965a0034..d9568c25f6e8c6 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -15,12 +15,15 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.typesafe.config.Config; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URLEncoder; import 
java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Optional; import javax.annotation.Nonnull; import javax.inject.Inject; +import org.apache.commons.httpclient.InvalidRedirectLocationException; import org.apache.commons.lang3.StringUtils; import org.pac4j.core.client.Client; import org.pac4j.core.context.Cookie; @@ -86,7 +89,17 @@ public Result authenticate(Http.Request request) { final Optional maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); + String redirectPath = maybeRedirectPath.orElse("/"); + try { + URI redirectUri = new URI(redirectPath); + if (redirectUri.getScheme() != null || redirectUri.getAuthority() != null) { + throw new InvalidRedirectLocationException("Redirect location must be relative to the base url, cannot " + + "redirect to other domains: " + redirectPath, redirectPath); + } + } catch (URISyntaxException | InvalidRedirectLocationException e) { + _logger.warn(e.getMessage()); + redirectPath = "/"; + } if (AuthUtils.hasValidSessionCookie(request)) { return Results.redirect(redirectPath); diff --git a/datahub-frontend/app/controllers/RedirectController.java b/datahub-frontend/app/controllers/RedirectController.java new file mode 100644 index 00000000000000..17f86b7fbffae3 --- /dev/null +++ b/datahub-frontend/app/controllers/RedirectController.java @@ -0,0 +1,25 @@ +package controllers; + +import config.ConfigurationProvider; +import javax.inject.Inject; +import javax.inject.Singleton; +import play.mvc.Controller; +import play.mvc.Http; +import play.mvc.Result; + +@Singleton +public class RedirectController extends Controller { + + @Inject ConfigurationProvider config; + + public Result favicon(Http.Request request) { + if (config.getVisualConfig().getAssets().getFaviconUrl().startsWith("http")) { + return permanentRedirect(config.getVisualConfig().getAssets().getFaviconUrl()); + } else { + final String prefix = 
config.getVisualConfig().getAssets().getFaviconUrl().startsWith("/") ? "/public" : "/public/"; + return ok(Application.class.getResourceAsStream( + prefix + config.getVisualConfig().getAssets().getFaviconUrl())) + .as("image/x-icon"); + } + } +} diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 9f4445b1aa5c7f..fb0c324932b6af 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -9,11 +9,15 @@ import com.linkedin.entity.client.SystemEntityClient; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import javax.annotation.Nonnull; import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; +import org.pac4j.core.client.Client; +import org.pac4j.core.client.Clients; import org.pac4j.core.config.Config; import org.pac4j.core.engine.CallbackLogic; import org.pac4j.core.http.adapter.HttpActionAdapter; @@ -34,6 +38,7 @@ public class SsoCallbackController extends CallbackController { private final SsoManager _ssoManager; + private final Config _config; @Inject public SsoCallbackController( @@ -41,8 +46,10 @@ public SsoCallbackController( @Nonnull Authentication systemAuthentication, @Nonnull SystemEntityClient entityClient, @Nonnull AuthServiceClient authClient, + @Nonnull Config config, @Nonnull com.typesafe.config.Config configs) { _ssoManager = ssoManager; + _config = config; setDefaultUrl("/"); // By default, redirects to Home Page on log in. 
setSaveInSession(false); setCallbackLogic( @@ -126,7 +133,18 @@ public Result perform( } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() - && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + if (!_ssoManager.isSsoEnabled()) { + return false; + } + updateConfig(); + return _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + } + + private void updateConfig() { + final Clients clients = new Clients(); + final List clientList = new ArrayList<>(); + clientList.add(_ssoManager.getSsoProvider().client()); + clients.setClients(clientList); + _config.setClients(clients); } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index a1b97701dbf882..ab4ce405a55411 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -1,7 +1,7 @@ plugins { - id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" id 'scala' id 'com.palantir.docker' + id 'org.gradle.playframework' } apply from: "../gradle/versioning/versioning.gradle" @@ -20,7 +20,6 @@ model { } task myTar(type: Tar) { - extension = "tgz" compression = Compression.GZIP from("${buildDir}/stage") @@ -39,23 +38,6 @@ artifacts { archives myTar } -graphqlCodegen { - // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()] - outputDir = new File("$projectDir/app/graphql") - packageName = "generated" - generateApis = true - modelValidationAnnotation = "" - customTypesMapping = [ - Long: "Long", - ] -} - -tasks.withType(Checkstyle) { - exclude "**/generated/**" -} - - /* PLAY UPGRADE NOTE Generates the distribution jars under the expected names. 
The playFramework plugin only accepts certain name values @@ -105,7 +87,7 @@ docker { } } -task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { +task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', dist]) { from ("${buildDir}/distributions") include "datahub-frontend-${version}.zip" into "${buildDir}/docker/" @@ -119,3 +101,23 @@ task cleanLocalDockerImages { } } dockerClean.finalizedBy(cleanLocalDockerImages) + +// gradle 8 fixes +tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist' +tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist' +stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistZip.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist +tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts +playBinaryDistTar.dependsOn createMainStartScripts +playBinaryDistZip.dependsOn createMainStartScripts +createMainStartScripts.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageMainDist' +createPlayBinaryZipDist.dependsOn 'stageMainDist' diff --git a/datahub-frontend/conf/logback.xml b/datahub-frontend/conf/logback.xml index 2a542083e20a27..78da231b4a71c5 100644 --- a/datahub-frontend/conf/logback.xml +++ b/datahub-frontend/conf/logback.xml @@ -13,6 +13,7 @@ Unable to renew 
the session. The session store may not support this feature Preferred JWS algorithm: null not available. Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 3102c26497fedd..9eac7aa34c3e37 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,11 +36,18 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) -# Map static resources from the /public folder to the /assets URL path -GET /assets/*file controllers.Assets.at(path="/public", file) # Analytics route POST /track controllers.TrackingController.track(request: Request) -# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle +# Map static resources from the /public folder to the /assets URL path +GET /assets/icons/favicon.ico controllers.RedirectController.favicon(request: Request) + +# Known React asset routes +GET /assets/*file controllers.Assets.at(path="/public/assets", file) +GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) +GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json") +GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt") + +# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html GET /*path controllers.Application.index(path) diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index dd1ceee411f746..b14962e5900cd2 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -1,4 +1,3 @@ -apply plugin: "org.gradle.playframework" // Change this to listen on a different port project.ext.httpPort = 9001 @@ -77,7 +76,7 @@ dependencies { implementation externalDependency.slf4jApi compileOnly 
externalDependency.lombok - runtimeOnly externalDependency.guice + runtimeOnly externalDependency.guicePlay runtimeOnly (externalDependency.playDocs) { exclude group: 'com.typesafe.akka', module: 'akka-http-core_2.12' } @@ -91,7 +90,7 @@ dependencies { play { platform { - playVersion = '2.8.18' + playVersion = '2.8.21' scalaVersion = '2.12' javaVersion = JavaVersion.VERSION_11 } @@ -101,4 +100,25 @@ play { test { useJUnitPlatform() + + testLogging.showStandardStreams = true + testLogging.exceptionFormat = 'full' + + def playJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + //"--add-opens=java.base/java.net=ALL-UNNAMED", + //"--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = playJava17CompatibleJvmArgs } diff --git a/datahub-frontend/public b/datahub-frontend/public new file mode 120000 index 00000000000000..60c68c7b4b1bc3 --- /dev/null +++ b/datahub-frontend/public @@ -0,0 +1 @@ +../datahub-web-react/public \ No newline at end of file diff --git a/datahub-frontend/run/logback.xml b/datahub-frontend/run/logback.xml index 9cabd3c923aa2c..5d275c821e16f1 100644 --- a/datahub-frontend/run/logback.xml +++ b/datahub-frontend/run/logback.xml @@ -13,6 +13,7 @@ Unable to renew the session. The session store may not support this feature Preferred JWS algorithm: null not available. 
Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index a5da0951d16328..534cffb5cc7fe4 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -91,6 +91,9 @@ public int gmsServerPort() { @BeforeAll public void init() throws IOException { _gmsServer = new MockWebServer(); + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // dynamic settings - not tested + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // dynamic settings - not tested + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // dynamic settings - not tested _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); _gmsServer.enqueue( new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); @@ -192,8 +195,27 @@ public void testAPI() throws ParseException { } @Test - public void testOidcRedirectToRequestedUrl() throws InterruptedException { + public void testOidcRedirectToRequestedUrl() { browser.goTo("/authenticate?redirect_uri=%2Fcontainer%2Furn%3Ali%3Acontainer%3ADATABASE"); assertEquals("container/urn:li:container:DATABASE", browser.url()); } + + /** + * The Redirect Uri parameter is used to store a previous relative location within the app to be able to + * take a user back to their expected page. Redirecting to other domains should be blocked. 
+ */ + @Test + public void testInvalidRedirectUrl() { + browser.goTo("/authenticate?redirect_uri=https%3A%2F%2Fwww.google.com"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=file%3A%2F%2FmyFile"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=ftp%3A%2F%2FsomeFtp"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=localhost%3A9002%2Flogin"); + assertEquals("", browser.url()); + } } diff --git a/datahub-frontend/test/oidc/OidcCallbackLogicTest.java b/datahub-frontend/test/oidc/OidcCallbackLogicTest.java new file mode 100644 index 00000000000000..f4784c29e91f2e --- /dev/null +++ b/datahub-frontend/test/oidc/OidcCallbackLogicTest.java @@ -0,0 +1,64 @@ +package oidc; + +import auth.sso.oidc.OidcConfigs; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import static auth.sso.oidc.OidcCallbackLogic.getGroupNames; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.pac4j.core.profile.CommonProfile; + +public class OidcCallbackLogicTest { + + @Test + public void testGetGroupsClaimNamesJsonArray() { + CommonProfile profile = createMockProfileWithAttribute("[\"group1\", \"group2\"]", "groupsClaimName"); + Collection result = getGroupNames(profile, "[\"group1\", \"group2\"]", "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + @Test + public void testGetGroupNamesWithSingleGroup() { + CommonProfile profile = createMockProfileWithAttribute("group1", "groupsClaimName"); + Collection result = getGroupNames(profile, "group1", "groupsClaimName"); + assertEquals(Arrays.asList("group1"), result); + } + + @Test + public void testGetGroupNamesWithCommaSeparated() { + CommonProfile profile = 
createMockProfileWithAttribute("group1,group2", "groupsClaimName"); + Collection result = getGroupNames(profile, "group1,group2", "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + + @Test + public void testGetGroupNamesWithCollection() { + CommonProfile profile = createMockProfileWithAttribute(Arrays.asList("group1", "group2"), "groupsClaimName"); + Collection result = getGroupNames(profile, Arrays.asList("group1", "group2"), "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + // Helper method to create a mock CommonProfile with given attribute + private CommonProfile createMockProfileWithAttribute(Object attribute, String attributeName) { + CommonProfile profile = mock(CommonProfile.class); + + // Mock for getAttribute(String) + when(profile.getAttribute(attributeName)).thenReturn(attribute); + + // Mock for getAttribute(String, Class) + if (attribute instanceof Collection) { + when(profile.getAttribute(attributeName, Collection.class)).thenReturn((Collection) attribute); + } else if (attribute instanceof String) { + when(profile.getAttribute(attributeName, String.class)).thenReturn((String) attribute); + } + // Add more conditions here if needed for other types + + return profile; + } +} diff --git a/datahub-web-react/public/logo.png b/datahub-frontend/test/resources/public/logos/datahub-logo.png similarity index 100% rename from datahub-web-react/public/logo.png rename to datahub-frontend/test/resources/public/logos/datahub-logo.png diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index a27a1462a8a277..8226d4e74cc21a 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,6 @@ package security; +import static auth.AuthUtils.*; import static auth.sso.oidc.OidcConfigs.*; import static 
org.junit.jupiter.api.Assertions.assertEquals; @@ -24,6 +25,7 @@ import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; +import org.json.JSONObject; public class OidcConfigurationTest { @@ -311,8 +313,32 @@ public Config withValue(String path, ConfigValue value) { public void readTimeoutPropagation() { CONFIG.withValue(OIDC_READ_TIMEOUT, ConfigValueFactory.fromAnyRef("10000")); - OidcConfigs oidcConfigs = new OidcConfigs(CONFIG); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG); + OidcConfigs oidcConfigs = oidcConfigsBuilder.build(); OidcProvider oidcProvider = new OidcProvider(oidcConfigs); assertEquals(10000, ((OidcClient) oidcProvider.client()).getConfiguration().getReadTimeout()); } + + @Test + public void readPreferredJwsAlgorithmPropagationFromConfig() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().put(PREFERRED_JWS_ALGORITHM, "HS256").toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + assertEquals("RS256", ((OidcClient) oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } + + @Test + public void readPreferredJwsAlgorithmPropagationFromJSON() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().put(PREFERRED_JWS_ALGORITHM, "Unused").put(PREFERRED_JWS_ALGORITHM_2, "HS256").toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new 
OidcProvider(oidcConfigs); + assertEquals("HS256", ((OidcClient) oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } } diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fba0031351b588..de264ce31b719b 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -1,16 +1,18 @@ plugins { + id 'java' id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } -apply plugin: 'java' + dependencies { - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:auth-impl') implementation project(':metadata-service:auth-config') implementation project(':metadata-service:configuration') implementation project(':metadata-service:services') implementation project(':metadata-io') implementation project(':metadata-utils') + implementation project(':metadata-models') implementation externalDependency.graphqlJava implementation externalDependency.graphqlJavaScalars @@ -29,27 +31,16 @@ dependencies { graphqlCodegen { // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = [ - "$projectDir/src/main/resources/entity.graphql".toString(), - "$projectDir/src/main/resources/app.graphql".toString(), - "$projectDir/src/main/resources/search.graphql".toString(), - "$projectDir/src/main/resources/analytics.graphql".toString(), - "$projectDir/src/main/resources/recommendation.graphql".toString(), - "$projectDir/src/main/resources/ingestion.graphql".toString(), - "$projectDir/src/main/resources/auth.graphql".toString(), - "$projectDir/src/main/resources/timeline.graphql".toString(), - "$projectDir/src/main/resources/tests.graphql".toString(), - "$projectDir/src/main/resources/step.graphql".toString(), - "$projectDir/src/main/resources/lineage.graphql".toString(), - ] - outputDir = new 
File("$projectDir/src/mainGeneratedGraphQL/java") + graphqlSchemaPaths = fileTree(dir: "${projectDir}/src/main/resources", include: '**/*.graphql').collect { it.absolutePath } + outputDir = new File("${projectDir}/src/mainGeneratedGraphQL/java") packageName = "com.linkedin.datahub.graphql.generated" + generateToString = true generateApis = true generateParameterizedFieldsResolvers = false modelValidationAnnotation = "@javax.annotation.Nonnull" customTypesMapping = [ - Long: "Long", - Float: "Float" + Long: "Long", + Float: "Float" ] } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index e45bed33eb0236..5f555b45d3b09c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -4,7 +4,6 @@ public class Constants { private Constants() {} - ; public static final String URN_FIELD_NAME = "urn"; public static final String URNS_FIELD_NAME = "urns"; @@ -19,9 +18,11 @@ private Constants() {} public static final String TESTS_SCHEMA_FILE = "tests.graphql"; public static final String STEPS_SCHEMA_FILE = "step.graphql"; public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String PROPERTIES_SCHEMA_FILE = "properties.graphql"; + public static final String FORMS_SCHEMA_FILE = "forms.graphql"; + public static final String INCIDENTS_SCHEMA_FILE = "incident.graphql"; public static final String BROWSE_PATH_DELIMITER = "/"; public static final String BROWSE_PATH_V2_DELIMITER = "␟"; public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 
f0cb56b1a99ce4..ba7a159c635e4b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -53,15 +53,18 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; import com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; import com.linkedin.datahub.graphql.generated.GetRootGlossaryNodesResult; import com.linkedin.datahub.graphql.generated.GetRootGlossaryTermsResult; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTermAssociation; +import com.linkedin.datahub.graphql.generated.IncidentSource; import com.linkedin.datahub.graphql.generated.IngestionSource; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.LineageRelationship; @@ -88,24 +91,28 @@ import com.linkedin.datahub.graphql.generated.ParentDomainsResult; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterionValue; import com.linkedin.datahub.graphql.generated.QueryEntity; +import com.linkedin.datahub.graphql.generated.QueryProperties; import com.linkedin.datahub.graphql.generated.QuerySubject; import com.linkedin.datahub.graphql.generated.QuickFilter; import com.linkedin.datahub.graphql.generated.RecommendationContent; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import 
com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SiblingProperties; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParams; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.TestResult; +import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.resolvers.MeResolver; import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver; import com.linkedin.datahub.graphql.resolvers.assertion.DeleteAssertionResolver; import com.linkedin.datahub.graphql.resolvers.assertion.EntityAssertionsResolver; -import com.linkedin.datahub.graphql.resolvers.auth.CreateAccessTokenResolver; -import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenResolver; -import com.linkedin.datahub.graphql.resolvers.auth.ListAccessTokensResolver; -import com.linkedin.datahub.graphql.resolvers.auth.RevokeAccessTokenResolver; +import com.linkedin.datahub.graphql.resolvers.auth.*; import com.linkedin.datahub.graphql.resolvers.browse.BrowsePathsResolver; import com.linkedin.datahub.graphql.resolvers.browse.BrowseResolver; import com.linkedin.datahub.graphql.resolvers.browse.EntityBrowsePathsResolver; @@ -121,7 +128,6 @@ import com.linkedin.datahub.graphql.resolvers.dataproduct.DeleteDataProductResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.ListDataProductAssetsResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.UpdateDataProductResolver; -import 
com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetStatsSummaryResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetUsageStatsResolver; import com.linkedin.datahub.graphql.resolvers.deprecation.UpdateDeprecationResolver; @@ -135,6 +141,12 @@ import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchAssignFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchRemoveFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.CreateDynamicFormAssignmentResolver; +import com.linkedin.datahub.graphql.resolvers.form.IsFormAssignedToMeResolver; +import com.linkedin.datahub.graphql.resolvers.form.SubmitFormPromptResolver; +import com.linkedin.datahub.graphql.resolvers.form.VerifyFormResolver; import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryNodeResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryTermResolver; @@ -149,6 +161,10 @@ import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver; +import com.linkedin.datahub.graphql.resolvers.health.EntityHealthResolver; +import com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver; +import com.linkedin.datahub.graphql.resolvers.incident.RaiseIncidentResolver; +import com.linkedin.datahub.graphql.resolvers.incident.UpdateIncidentStatusResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver; import 
com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateTestConnectionRequestResolver; @@ -159,6 +175,7 @@ import com.linkedin.datahub.graphql.resolvers.ingest.secret.DeleteSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.GetSecretValuesResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.ListSecretsResolver; +import com.linkedin.datahub.graphql.resolvers.ingest.secret.UpdateSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.DeleteIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver; @@ -215,6 +232,7 @@ import com.linkedin.datahub.graphql.resolvers.post.CreatePostResolver; import com.linkedin.datahub.graphql.resolvers.post.DeletePostResolver; import com.linkedin.datahub.graphql.resolvers.post.ListPostsResolver; +import com.linkedin.datahub.graphql.resolvers.post.UpdatePostResolver; import com.linkedin.datahub.graphql.resolvers.query.CreateQueryResolver; import com.linkedin.datahub.graphql.resolvers.query.DeleteQueryResolver; import com.linkedin.datahub.graphql.resolvers.query.ListQueriesResolver; @@ -239,6 +257,7 @@ import com.linkedin.datahub.graphql.resolvers.settings.view.UpdateGlobalViewsSettingsResolver; import com.linkedin.datahub.graphql.resolvers.step.BatchGetStepStatesResolver; import com.linkedin.datahub.graphql.resolvers.step.BatchUpdateStepStatesResolver; +import com.linkedin.datahub.graphql.resolvers.structuredproperties.UpsertStructuredPropertiesResolver; import com.linkedin.datahub.graphql.resolvers.tag.CreateTagResolver; import com.linkedin.datahub.graphql.resolvers.tag.DeleteTagResolver; import com.linkedin.datahub.graphql.resolvers.tag.SetTagColorResolver; @@ -253,6 +272,7 @@ import 
com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver; +import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver; import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver; @@ -287,9 +307,16 @@ import com.linkedin.datahub.graphql.types.dataset.DatasetType; import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; +import com.linkedin.datahub.graphql.types.datatype.DataTypeType; import com.linkedin.datahub.graphql.types.domain.DomainType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.CreateERModelRelationshipResolver; +import com.linkedin.datahub.graphql.types.ermodelrelationship.ERModelRelationshipType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.UpdateERModelRelationshipResolver; +import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; +import com.linkedin.datahub.graphql.types.incident.IncidentType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureType; import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType; @@ -299,9 +326,11 @@ import com.linkedin.datahub.graphql.types.ownership.OwnershipType; import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType; import com.linkedin.datahub.graphql.types.query.QueryType; +import 
com.linkedin.datahub.graphql.types.restricted.RestrictedType; import com.linkedin.datahub.graphql.types.role.DataHubRoleType; import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyType; import com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; @@ -320,8 +349,9 @@ import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationshipService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -336,6 +366,8 @@ import graphql.schema.DataFetchingEnvironment; import graphql.schema.StaticDataFetcher; import graphql.schema.idl.RuntimeWiring; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -391,6 +423,9 @@ public class GmsGraphQLEngine { private final LineageService lineageService; private final QueryService queryService; private final DataProductService dataProductService; + private final ERModelRelationshipService erModelRelationshipService; + private final FormService formService; + private final RestrictedService restrictedService; private final FeatureFlags featureFlags; @@ -434,10 +469,20 @@ public class GmsGraphQLEngine { private final DataHubPolicyType dataHubPolicyType; private final 
DataHubRoleType dataHubRoleType; private final SchemaFieldType schemaFieldType; + private final ERModelRelationshipType erModelRelationshipType; private final DataHubViewType dataHubViewType; private final QueryType queryType; private final DataProductType dataProductType; private final OwnershipType ownershipType; + private final StructuredPropertyType structuredPropertyType; + private final DataTypeType dataTypeType; + private final EntityTypeType entityTypeType; + private final FormType formType; + private final IncidentType incidentType; + private final RestrictedType restrictedType; + + private final int graphQLQueryComplexityLimit; + private final int graphQLQueryDepthLimit; /** A list of GraphQL Plugins that extend the core engine */ private final List graphQLPlugins; @@ -492,7 +537,10 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.settingsService = args.settingsService; this.lineageService = args.lineageService; this.queryService = args.queryService; + this.erModelRelationshipService = args.erModelRelationshipService; this.dataProductService = args.dataProductService; + this.formService = args.formService; + this.restrictedService = args.restrictedService; this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); @@ -532,11 +580,21 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { this.testType = new TestType(entityClient); this.dataHubPolicyType = new DataHubPolicyType(entityClient); this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); + this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); + this.erModelRelationshipType = new ERModelRelationshipType(entityClient, featureFlags); this.dataHubViewType = new DataHubViewType(entityClient); this.queryType = new QueryType(entityClient); this.dataProductType = new DataProductType(entityClient); 
this.ownershipType = new OwnershipType(entityClient); + this.structuredPropertyType = new StructuredPropertyType(entityClient); + this.dataTypeType = new DataTypeType(entityClient); + this.entityTypeType = new EntityTypeType(entityClient); + this.formType = new FormType(entityClient); + this.incidentType = new IncidentType(entityClient); + this.restrictedType = new RestrictedType(entityClient, restrictedService); + + this.graphQLQueryComplexityLimit = args.graphQLQueryComplexityLimit; + this.graphQLQueryDepthLimit = args.graphQLQueryDepthLimit; // Init Lists this.entityTypes = @@ -569,14 +627,22 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubPolicyType, dataHubRoleType, schemaFieldType, + erModelRelationshipType, dataHubViewType, queryType, dataProductType, - ownershipType); + ownershipType, + structuredPropertyType, + dataTypeType, + entityTypeType, + formType, + incidentType, + restrictedType); this.loadableTypes = new ArrayList<>(entityTypes); // Extend loadable types with types from the plugins // This allows us to offer search and browse capabilities out of the box for those types for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + this.entityTypes.addAll(plugin.getEntityTypes()); Collection> pluginLoadableTypes = plugin.getLoadableTypes(); if (pluginLoadableTypes != null) { this.loadableTypes.addAll(pluginLoadableTypes); @@ -652,19 +718,27 @@ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { configureTestResultResolvers(builder); configureRoleResolvers(builder); configureSchemaFieldResolvers(builder); + configureERModelRelationshipResolvers(builder); configureEntityPathResolvers(builder); + configureResolvedAuditStampResolvers(builder); configureViewResolvers(builder); configureQueryEntityResolvers(builder); configureOwnershipTypeResolver(builder); configurePluginResolvers(builder); + configureStructuredPropertyResolvers(builder); + configureFormResolvers(builder); + configureIncidentResolvers(builder); + 
configureRestrictedResolvers(builder); } private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { builder.type( "Role", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "RoleAssociation", typeWiring -> @@ -702,7 +776,10 @@ public GraphQLEngine.Builder builder() { .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(PROPERTIES_SCHEMA_FILE)) + .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INCIDENTS_SCHEMA_FILE)); for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { List pluginSchemaFiles = plugin.getSchemaFiles(); @@ -718,7 +795,9 @@ public GraphQLEngine.Builder builder() { builder .addDataLoaders(loaderSuppliers(loadableTypes)) .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); + .configureRuntimeWiring(this::configureRuntimeWiring) + .setGraphQLQueryComplexityLimit(graphQLQueryComplexityLimit) + .setGraphQLQueryDepthLimit(graphQLQueryDepthLimit); return builder; } @@ -766,6 +845,9 @@ private void configureContainerResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) 
.dataFetcher( "platform", @@ -835,12 +917,14 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { "scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) .dataFetcher( - "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + "searchAcrossLineage", + new SearchAcrossLineageResolver(this.entityClient, this.entityRegistry)) .dataFetcher( "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) .dataFetcher( "aggregateAcrossEntities", - new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + new AggregateAcrossEntitiesResolver( + this.entityClient, this.viewService, this.formService)) .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) .dataFetcher( "autoCompleteForMultiple", @@ -868,6 +952,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("erModelRelationship", getResolver(erModelRelationshipType)) .dataFetcher("dataPlatform", getResolver(dataPlatformType)) .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) @@ -885,6 +970,9 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + .dataFetcher( + "getAccessTokenMetadata", + new GetAccessTokenMetadataResolver(statefulTokenService, this.entityClient)) .dataFetcher("container", getResolver(containerType)) .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) .dataFetcher("listSecrets", new 
ListSecretsResolver(this.entityClient)) @@ -927,20 +1015,22 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) .dataFetcher( - "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + "browseV2", + new BrowseV2Resolver(this.entityClient, this.viewService, this.formService))); } private DataFetcher getEntitiesResolver() { return new BatchGetEntitiesResolver( entityTypes, (env) -> { + final QueryContext context = env.getContext(); List urns = env.getArgument(URNS_FIELD_NAME); return urns.stream() + .map(UrnUtils::getUrn) .map( (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + return UrnToEntityMapper.map(context, urn); } catch (Exception e) { throw new RuntimeException("Failed to get entity", e); } @@ -954,8 +1044,9 @@ private DataFetcher getEntityResolver() { entityTypes, (env) -> { try { + final QueryContext context = env.getContext(); Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); + return UrnToEntityMapper.map(context, urn); } catch (Exception e) { throw new RuntimeException("Failed to get entity", e); } @@ -994,6 +1085,13 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher( + "updateERModelRelationship", + new UpdateERModelRelationshipResolver(this.entityClient)) + .dataFetcher( + "createERModelRelationship", + new CreateERModelRelationshipResolver( + this.entityClient, this.erModelRelationshipService)) .dataFetcher("addTag", new AddTagResolver(entityService)) .dataFetcher("addTags", new AddTagsResolver(entityService)) 
.dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) @@ -1042,6 +1140,8 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "updateSecret", new UpdateSecretResolver(this.entityClient, this.secretService)) .dataFetcher( "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) .dataFetcher( @@ -1085,9 +1185,12 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) .dataFetcher( "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) .dataFetcher( - "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) + "addRelatedTerms", + new AddRelatedTermsResolver(this.entityService, this.entityClient)) + .dataFetcher( + "removeRelatedTerms", + new RemoveRelatedTermsResolver(this.entityService, this.entityClient)) .dataFetcher( "createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) @@ -1103,6 +1206,7 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) .dataFetcher("createPost", new CreatePostResolver(this.postService)) .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher("updatePost", new UpdatePostResolver(this.postService)) .dataFetcher( "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) .dataFetcher("createView", new CreateViewResolver(this.viewService)) @@ -1137,7 +1241,22 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new 
UpdateOwnershipTypeResolver(this.ownershipTypeService)) .dataFetcher( "deleteOwnershipType", - new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + new DeleteOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher("submitFormPrompt", new SubmitFormPromptResolver(this.formService)) + .dataFetcher("batchAssignForm", new BatchAssignFormResolver(this.formService)) + .dataFetcher( + "createDynamicFormAssignment", + new CreateDynamicFormAssignmentResolver(this.formService)) + .dataFetcher( + "verifyForm", new VerifyFormResolver(this.formService, this.groupService)) + .dataFetcher("batchRemoveForm", new BatchRemoveFormResolver(this.formService)) + .dataFetcher( + "upsertStructuredProperties", + new UpsertStructuredPropertiesResolver(this.entityClient)) + .dataFetcher("raiseIncident", new RaiseIncidentResolver(this.entityClient)) + .dataFetcher( + "updateIncidentStatus", + new UpdateIncidentStatusResolver(this.entityClient, this.entityService))); } private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { @@ -1340,7 +1459,25 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder typeWiring.dataFetcher( "ownershipType", new EntityTypeResolver( - entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))) + .type( + "StructuredPropertiesEntry", + typeWiring -> + typeWiring + .dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertiesEntry) env.getSource()) + .getStructuredProperty() + .getUrn())) + .dataFetcher( + "valueEntities", + new BatchGetEntitiesResolver( + entityTypes, + (env) -> + ((StructuredPropertiesEntry) env.getSource()).getValueEntities()))); } /** @@ -1356,7 +1493,12 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) 
.dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1402,7 +1544,12 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) .dataFetcher( - "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(true, true))) .dataFetcher("schemaMetadata", new AspectResolver()) .dataFetcher( "assertions", new EntityAssertionsResolver(entityClient, graphClient)) @@ -1420,6 +1567,14 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { "owner", new OwnerTypeResolver<>( ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "SchemaField", + typeWiring -> + typeWiring.dataFetcher( + "schemaFieldEntity", + new LoadableTypeResolver<>( + schemaFieldType, + (env) -> ((SchemaField) env.getSource()).getSchemaFieldEntity().getUrn()))) .type( "UserUsageCounts", typeWiring -> @@ -1516,6 +1671,8 @@ private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) .dataFetcher("schemaMetadata", new AspectResolver()) .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); } @@ -1526,7 +1683,9 @@ private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) 
typeWiring .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); } private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { @@ -1549,6 +1708,17 @@ private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); } + private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "ResolvedAuditStamp", + typeWiring -> + typeWiring.dataFetcher( + "actor", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((ResolvedAuditStamp) env.getSource()).getActor().getUrn()))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.CorpUser} type. 
@@ -1557,8 +1727,12 @@ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { builder.type( "CorpUser", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); builder.type( "CorpUserInfo", typeWiring -> @@ -1579,6 +1753,9 @@ private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService))); builder .type( @@ -1621,8 +1798,10 @@ private void configureTagAssociationResolver(final RuntimeWiring.Builder builder builder.type( "Tag", typeWiring -> - typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))); + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); builder.type( "TagAssociation", typeWiring -> @@ -1657,6 +1836,8 @@ private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) .dataFetcher( "platform", @@ -1687,7 +1868,12 @@ private void 
configureDashboardResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1718,7 +1904,14 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); builder.type( "DashboardInfo", typeWiring -> @@ -1756,6 +1949,42 @@ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { }))); } + private void configureStructuredPropertyResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "StructuredPropertyDefinition", + typeWiring -> + typeWiring + .dataFetcher( + "valueType", + new LoadableTypeResolver<>( + dataTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getValueType() + .getUrn())) + .dataFetcher( + "entityTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getEntityTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + builder.type( + "TypeQualifier", + typeWiring -> + typeWiring.dataFetcher( + 
"allowedTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((TypeQualifier) env.getSource()) + .getAllowedTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.Chart} type. @@ -1767,7 +1996,12 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( @@ -1797,7 +2031,14 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); builder.type( "ChartInfo", typeWiring -> @@ -1856,6 +2097,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) { .type( "HyperParameterValueType", typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver())) .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) .type( "TimeSeriesAspect", 
@@ -1868,6 +2110,60 @@ private void configureTypeExtensions(final RuntimeWiring.Builder builder) { builder.scalar(GraphQLLong); } + /** Configures resolvers responsible for resolving the {@link ERModelRelationship} type. */ + private void configureERModelRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "ERModelRelationship", + typeWiring -> + typeWiring + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ERModelRelationshipProperties", + typeWiring -> + typeWiring + .dataFetcher( + "source", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationshipProperties erModelRelationshipProperties = + env.getSource(); + return erModelRelationshipProperties.getSource() != null + ? erModelRelationshipProperties.getSource().getUrn() + : null; + })) + .dataFetcher( + "destination", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationshipProperties erModelRelationshipProperties = + env.getSource(); + return erModelRelationshipProperties.getDestination() != null + ? erModelRelationshipProperties.getDestination().getUrn() + : null; + }))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))); + } + /** * Configures resolvers responsible for resolving the {@link * com.linkedin.datahub.graphql.generated.DataJob} type. 
@@ -1881,12 +2177,24 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "dataFlow", new LoadableTypeResolver<>( dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataFlow() != null + ? dataJob.getDataFlow().getUrn() + : null; + })) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -1899,7 +2207,14 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { })) .dataFetcher("runs", new DataJobRunsResolver(entityClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))) .type( "DataJobInputOutput", typeWiring -> @@ -1944,13 +2259,19 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + 
.dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( dataPlatformType, (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -1960,7 +2281,14 @@ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }))); + })) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); } /** @@ -1977,13 +2305,21 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "platform", new LoadableTypeResolver<>( dataPlatformType, (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -2060,13 +2396,21 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new 
EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher( "platform", new LoadableTypeResolver<>( dataPlatformType, (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -2101,13 +2445,21 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde "relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher( "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) .dataFetcher( "platform", new LoadableTypeResolver<>( dataPlatformType, (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -2124,8 +2476,16 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde typeWiring .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + 
this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( @@ -2142,7 +2502,15 @@ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builde typeWiring .dataFetcher( "relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) .dataFetcher( "dataPlatformInstance", @@ -2177,6 +2545,9 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { typeWiring .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); builder.type( "DomainAssociation", @@ -2191,12 +2562,65 @@ private void configureDomainResolvers(final RuntimeWiring.Builder builder) { .getUrn()))); } + private void configureFormResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "FormAssociation", + typeWiring -> + typeWiring.dataFetcher( + "form", + new LoadableTypeResolver<>( + formType, + (env) -> + ((com.linkedin.datahub.graphql.generated.FormAssociation) env.getSource()) + .getForm() + .getUrn()))); 
+ builder.type( + "StructuredPropertyParams", + typeWiring -> + typeWiring.dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertyParams) env.getSource()) + .getStructuredProperty() + .getUrn()))); + builder.type( + "FormActorAssignment", + typeWiring -> + typeWiring + .dataFetcher( + "users", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getUsers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> { + final FormActorAssignment actors = env.getSource(); + return actors.getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher("isAssignedToMe", new IsFormAssignedToMeResolver(groupService))); + } + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { builder.type( "DataProduct", typeWiring -> typeWiring .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); } @@ -2221,7 +2645,9 @@ private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { ? 
assertion.getDataPlatformInstance().getUrn() : null; })) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); } private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { @@ -2311,9 +2737,27 @@ private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) builder .type( "QueryEntity", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final QueryEntity query = env.getSource(); + return query.getPlatform() != null + ? query.getPlatform().getUrn() + : null; + }))) + .type( + "QueryProperties", typeWiring -> typeWiring.dataFetcher( - "relationships", new EntityRelationshipsResultResolver(graphClient))) + "origin", + new EntityTypeResolver( + entityTypes, (env) -> ((QueryProperties) env.getSource()).getOrigin()))) .type( "ListQueriesResult", typeWiring -> @@ -2363,7 +2807,10 @@ private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder b typeWiring -> typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) .dataFetcher( "state", new TimeSeriesAspectResolver( @@ -2435,4 +2882,47 @@ private void configureIngestionSourceResolvers(final RuntimeWiring.Builder build : null; }))); } + + private void configureIncidentResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Incident", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + 
"IncidentSource", + typeWiring -> + typeWiring.dataFetcher( + "source", + new LoadableTypeResolver<>( + this.assertionType, + (env) -> { + final IncidentSource incidentSource = env.getSource(); + return incidentSource.getSource() != null + ? incidentSource.getSource().getUrn() + : null; + }))); + + // Add incidents attribute to all entities that support it + final List entitiesWithIncidents = + ImmutableList.of("Dataset", "DataJob", "DataFlow", "Dashboard", "Chart"); + for (String entity : entitiesWithIncidents) { + builder.type( + entity, + typeWiring -> + typeWiring.dataFetcher("incidents", new EntityIncidentsResolver(entityClient))); + } + } + + private void configureRestrictedResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Restricted", + typeWiring -> + typeWiring + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 4829194a8ce4d9..5f5e1c929f6ac7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -23,8 +23,9 @@ import com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationshipService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import 
com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -34,6 +35,8 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.usage.UsageClient; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import lombok.Data; @Data @@ -73,6 +76,11 @@ public class GmsGraphQLEngineArgs { QueryService queryService; FeatureFlags featureFlags; DataProductService dataProductService; + ERModelRelationshipService erModelRelationshipService; + FormService formService; + RestrictedService restrictedService; + int graphQLQueryComplexityLimit; + int graphQLQueryDepthLimit; // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index 472d9465aeee12..a544bd46527c46 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql; +import com.linkedin.datahub.graphql.types.EntityType; import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.idl.RuntimeWiring; import java.util.Collection; @@ -34,6 +35,9 @@ public interface GmsGraphQLPlugin { */ Collection> getLoadableTypes(); + /** Return a list of Entity Types that the plugin services */ + Collection> getEntityTypes(); + /** * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific * resolvers. 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java index f95727a1e8fd1d..67b20801d75083 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java @@ -6,6 +6,10 @@ import graphql.ExecutionInput; import graphql.ExecutionResult; import graphql.GraphQL; +import graphql.analysis.MaxQueryComplexityInstrumentation; +import graphql.analysis.MaxQueryDepthInstrumentation; +import graphql.execution.instrumentation.ChainedInstrumentation; +import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.tracing.TracingInstrumentation; import graphql.schema.GraphQLSchema; import graphql.schema.idl.RuntimeWiring; @@ -38,11 +42,17 @@ public class GraphQLEngine { private final GraphQL _graphQL; private final Map>> _dataLoaderSuppliers; + private final int graphQLQueryComplexityLimit; + private final int graphQLQueryDepthLimit; private GraphQLEngine( @Nonnull final List schemas, @Nonnull final RuntimeWiring runtimeWiring, - @Nonnull final Map>> dataLoaderSuppliers) { + @Nonnull final Map>> dataLoaderSuppliers, + @Nonnull final int graphQLQueryComplexityLimit, + @Nonnull final int graphQLQueryDepthLimit) { + this.graphQLQueryComplexityLimit = graphQLQueryComplexityLimit; + this.graphQLQueryDepthLimit = graphQLQueryDepthLimit; _dataLoaderSuppliers = dataLoaderSuppliers; @@ -63,10 +73,15 @@ private GraphQLEngine( /* * Instantiate engine */ + List instrumentations = new ArrayList<>(3); + instrumentations.add(new TracingInstrumentation()); + instrumentations.add(new MaxQueryDepthInstrumentation(graphQLQueryDepthLimit)); + instrumentations.add(new MaxQueryComplexityInstrumentation(graphQLQueryComplexityLimit)); + ChainedInstrumentation chainedInstrumentation = new 
ChainedInstrumentation(instrumentations); _graphQL = new GraphQL.Builder(graphQLSchema) .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler()) - .instrumentation(new TracingInstrumentation()) + .instrumentation(chainedInstrumentation) .build(); } @@ -111,6 +126,8 @@ public static class Builder { private final Map>> _loaderSuppliers = new HashMap<>(); private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); + private int graphQLQueryComplexityLimit = 2000; + private int graphQLQueryDepthLimit = 50; /** * Used to add a schema file containing the GQL types resolved by the engine. @@ -162,9 +179,24 @@ public Builder configureRuntimeWiring(final Consumer buil return this; } + public Builder setGraphQLQueryComplexityLimit(final int queryComplexityLimit) { + this.graphQLQueryComplexityLimit = queryComplexityLimit; + return this; + } + + public Builder setGraphQLQueryDepthLimit(final int queryDepthLimit) { + this.graphQLQueryDepthLimit = queryDepthLimit; + return this; + } + /** Builds a {@link GraphQLEngine}. 
*/ public GraphQLEngine build() { - return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); + return new GraphQLEngine( + _schemas, + _runtimeWiringBuilder.build(), + _loaderSuppliers, + graphQLQueryComplexityLimit, + graphQLQueryDepthLimit); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 9f110e713ed574..7dffd90cf2d7cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -3,6 +3,7 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; +import io.datahubproject.metadata.context.OperationContext; /** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { @@ -25,4 +26,9 @@ default String getActorUrn() { /** Returns the authorizer used to authorize specific actions. 
*/ Authorizer getAuthorizer(); + + /** + * @return Returns the operational context + */ + OperationContext getOperationContext(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java new file mode 100644 index 00000000000000..ae8ac4330e7fb8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql; + +import com.linkedin.common.SubTypes; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.Collections; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@AllArgsConstructor +public class SubTypesResolver implements DataFetcher> { + + EntityClient _entityClient; + String _entityType; + String _aspectName; + + @Override + @Nullable + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + SubTypes subType = null; + final String urnStr = ((Entity) environment.getSource()).getUrn(); + try { + final Urn urn = Urn.createFromString(urnStr); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(_aspectName), + context.getAuthentication()) + .get(urn); + if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { + subType = + new 
SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); + } + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); + } + return subType; + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index 22ee4d4d4845c9..fd23cd5fdda450 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.RawAspect; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.AspectSpec; @@ -37,7 +37,12 @@ public class WeaklyTypedAspectsResolver implements DataFetcher getProductAnalyticsCharts(Authentication authentica final List charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); + final DateTime startOfThisMonth = dateUtil.getStartOfThisMonth(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); @@ -103,7 +104,7 @@ private List getProductAnalyticsCharts(Authentication authentica charts.add( getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), - startOfNextMonth.minusMillis(1), + startOfThisMonth.minusMillis(1), "Monthly Active Users", DateInterval.MONTH)); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index 31a8359f8f0e3a..c4c353f6eb8dbd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -2,9 +2,9 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.analytics.service.AnalyticsUtil; import com.linkedin.datahub.graphql.generated.AnalyticsChart; import com.linkedin.datahub.graphql.generated.AnalyticsChartGroup; @@ -12,8 +12,7 @@ import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput; import com.linkedin.datahub.graphql.generated.NamedBar; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; @@ -22,6 +21,7 @@ import com.linkedin.metadata.search.utils.QueryUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -41,7 +41,7 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws 
Exception { - final Authentication authentication = ResolverUtils.getAuthentication(environment); + final QueryContext context = environment.getContext(); final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); @@ -49,7 +49,7 @@ public final List get(DataFetchingEnvironment environment) final AnalyticsChartGroup group = new AnalyticsChartGroup(); group.setGroupId("FilteredMetadataAnalytics"); group.setTitle(""); - group.setCharts(getCharts(input, authentication)); + group.setCharts(getCharts(input, context.getOperationContext())); return ImmutableList.of(group); } catch (Exception e) { log.error("Failed to retrieve metadata analytics!", e); @@ -57,8 +57,8 @@ public final List get(DataFetchingEnvironment environment) } } - private List getCharts( - MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List getCharts(MetadataAnalyticsInput input, OperationContext opContext) + throws Exception { final List charts = new ArrayList<>(); List entities = Collections.emptyList(); @@ -77,8 +77,7 @@ private List getCharts( } SearchResult searchResult = - _entityClient.searchAcrossEntities( - entities, query, filter, 0, 0, null, null, authentication); + _entityClient.searchAcrossEntities(opContext, entities, query, filter, 0, 0, null, null); List aggregationMetadataList = searchResult.getMetadata().getAggregations(); @@ -96,7 +95,7 @@ private List getCharts( Constants.DOMAIN_ENTITY_NAME, ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, - authentication); + opContext.getSessionAuthentication()); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } @@ -113,7 +112,7 @@ private List getCharts( Constants.DATA_PLATFORM_ENTITY_NAME, ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, - authentication); + opContext.getSessionAuthentication()); charts.add( 
BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } @@ -132,7 +131,7 @@ private List getCharts( ImmutableSet.of( Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, - authentication); + opContext.getSessionAuthentication()); charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 03333bda05f61f..72643ccac6325c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.NamedLine; import com.linkedin.datahub.graphql.generated.NumericDataPoint; import com.linkedin.datahub.graphql.generated.Row; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.util.List; import java.util.Map; @@ -220,7 +220,8 @@ private List extractBarSegmentsFromAggregations( .collect(Collectors.toList()); } - public Row buildRow(String groupByValue, Function groupByValueToCell, int count) { + public static Row buildRow( + String groupByValue, Function groupByValueToCell, int count) { List values = ImmutableList.of(groupByValue, String.valueOf(count)); List cells = ImmutableList.of( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index be7f4d2f0897ab..20f0bd7631faae 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -49,7 +49,7 @@ public static Cell buildCellWithEntityLandingPage(String urn) { Cell result = new Cell(); result.setValue(urn); try { - Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); + Entity entity = UrnToEntityMapper.map(null, Urn.createFromString(urn)); result.setEntity(entity); result.setLinkParams( LinkParams.builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 6ba3777d476cb6..777d0982644ccb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.authorization; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.datahub.authorization.AuthUtil.VIEW_RESTRICTED_ENTITY_TYPES; +import static com.datahub.authorization.AuthUtil.canViewEntity; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.ConjunctivePrivilegeGroup; @@ -9,42 +12,54 @@ import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.metadata.authorization.PoliciesConfig; -import java.time.Clock; +import io.datahubproject.metadata.context.OperationContext; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.List; -import java.util.Optional; +import java.util.Set; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.reflect.ConstructorUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.commons.lang3.reflect.MethodUtils; +import org.codehaus.plexus.util.StringUtils; +@Slf4j public class AuthorizationUtils { - private static final Clock CLOCK = Clock.systemUTC(); + private static final String GRAPHQL_GENERATED_PACKAGE = "com.linkedin.datahub.graphql.generated"; - public static AuditStamp createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp() - .setTime(CLOCK.millis()) - .setActor(UrnUtils.getUrn(context.getActorUrn())); - } + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } public static boolean canManagePolicies(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), 
PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } public static boolean canManageTokens(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(ACCESS_TOKEN_ENTITY_NAME)); } /** @@ -60,12 +75,13 @@ public static boolean canCreateDomains(@Nonnull QueryContext context) { new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageDomains(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } /** @@ -81,24 +97,25 @@ public static boolean canCreateTags(@Nonnull QueryContext context) { new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageTags(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) 
{ - return isAuthorized( - context, - Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), - PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } public static boolean canEditGroupMembers( @@ -110,7 +127,7 @@ public static boolean canEditGroupMembers( new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( + return isAuthorized( context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, @@ -129,28 +146,45 @@ public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context ImmutableList.of( PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE); } public static boolean canManageGlobalViews(@Nonnull 
QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); } public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); + } + + public static boolean canEditProperties(@Nonnull Urn targetUrn, @Nonnull QueryContext context) { + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PROPERTIES_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); } public static boolean canEditEntityQueries( @@ -190,23 +224,149 @@ public static boolean canDeleteQuery( return canEditEntityQueries(subjectUrns, context); } - public static boolean isAuthorized( - @Nonnull QueryContext context, - @Nonnull Optional resourceSpec, - @Nonnull PoliciesConfig.Privilege privilege) { - final Authorizer authorizer = context.getAuthorizer(); - final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized( - authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + /** + * Can view relationship logic goes here. Should be considered directionless for now. Or direction + * added to the interface. 
+ * + * @param opContext + * @param a + * @param b + * @return + */ + public static boolean canViewRelationship( + @Nonnull OperationContext opContext, @Nonnull Urn a, @Nonnull Urn b) { + // TODO relationships filter + return true; } - public static boolean isAuthorized( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { - return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); + /* + * Optionally check view permissions against a list of urns if the config option is enabled + */ + public static boolean canView(@Nonnull OperationContext opContext, @Nonnull Urn urn) { + // if search authorization is disabled, skip the view permission check + if (opContext.getOperationContextConfig().getViewAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth() + && VIEW_RESTRICTED_ENTITY_TYPES.contains(urn.getEntityType())) { + + return opContext + .getViewAuthorizationContext() + .map( + viewAuthContext -> { + + // check cache + if (viewAuthContext.canView(Set.of(urn))) { + return true; + } + + if (!canViewEntity( + opContext.getSessionAuthentication().getActor().toUrnStr(), + opContext.getAuthorizerContext().getAuthorizer(), + urn)) { + return false; + } + + // cache viewable urn + viewAuthContext.addViewableUrns(Set.of(urn)); + return true; + }) + .orElse(false); + } + return true; + } + + public static T restrictEntity(@Nonnull Object entity, Class clazz) { + List allFields = FieldUtils.getAllFieldsList(entity.getClass()); + try { + Object[] args = + allFields.stream() + .map( + field -> { + // properties are often not required but only because + // they are a `one of` non-null. + // i.e. ChartProperties or ChartEditableProperties are required. 
+ if (field.getAnnotation(javax.annotation.Nonnull.class) != null + || field.getName().toLowerCase().contains("properties")) { + try { + switch (field.getName()) { + // pass through to the restricted entity + case "name": + case "type": + case "urn": + case "chartId": + case "id": + case "jobId": + case "flowId": + Method fieldGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return fieldGetter.invoke(entity, (Object[]) null); + default: + switch (field.getType().getSimpleName()) { + case "Boolean": + Method boolGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return boolGetter.invoke(entity, (Object[]) null); + // mask these fields in the restricted entity + case "String": + return ""; + case "Integer": + return 0; + case "Long": + return 0L; + case "Double": + return 0.0; + case "List": + return List.of(); + default: + if (Enum.class.isAssignableFrom(field.getType())) { + // pass through enum + Method enumGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return enumGetter.invoke(entity, (Object[]) null); + } else if (entity + .getClass() + .getPackage() + .getName() + .contains(GRAPHQL_GENERATED_PACKAGE)) { + // handle nested fields recursively + Method getter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + Object nestedEntity = getter.invoke(entity, (Object[]) null); + if (nestedEntity == null) { + return null; + } else { + return restrictEntity(nestedEntity, getter.getReturnType()); + } + } + log.error( + String.format( + "Failed to resolve non-null field: Object:%s Field:%s FieldType: %s", + entity.getClass().getName(), + field.getName(), + field.getType().getName())); + } + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + } + return (Object) 
null; + }) + .toArray(); + return ConstructorUtils.invokeConstructor(clazz, args); + } catch (NoSuchMethodException + | IllegalAccessException + | InvocationTargetException + | InstantiationException e) { + throw new RuntimeException(e); + } } public static boolean isAuthorized( @@ -216,7 +376,16 @@ public static boolean isAuthorized( @Nonnull String resource, @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); - return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); + return AuthUtil.isAuthorized(authorizer, actor, privilegeGroup, resourceSpec); + } + + public static boolean isViewDatasetUsageAuthorized( + final QueryContext context, final Urn resourceUrn) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE, + new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())); } private AuthorizationUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 7c3ea1d581b6ed..48c57b5c641576 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -6,41 +6,57 @@ import graphql.execution.DataFetcherExceptionHandlerResult; import graphql.execution.ResultPath; import graphql.language.SourceLocation; +import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; @PublicApi @Slf4j public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { + private static final String DEFAULT_ERROR_MESSAGE = "An unknown error occurred."; + @Override - public 
DataFetcherExceptionHandlerResult onException( + public CompletableFuture handleException( DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = handlerParameters.getException(); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); - log.error("Failed to execute DataFetcher", exception); - DataHubGraphQLErrorCode errorCode = DataHubGraphQLErrorCode.SERVER_ERROR; - String message = "An unknown error occurred."; + String message = DEFAULT_ERROR_MESSAGE; - // note: make sure to access the true error message via `getCause()` - if (exception.getCause() instanceof IllegalArgumentException) { + IllegalArgumentException illException = + findFirstThrowableCauseOfClass(exception, IllegalArgumentException.class); + if (illException != null) { + log.error("Failed to execute", illException); errorCode = DataHubGraphQLErrorCode.BAD_REQUEST; - message = exception.getCause().getMessage(); + message = illException.getMessage(); } - if (exception instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception).errorCode(); - message = exception.getMessage(); + DataHubGraphQLException graphQLException = + findFirstThrowableCauseOfClass(exception, DataHubGraphQLException.class); + if (graphQLException != null) { + log.error("Failed to execute", graphQLException); + errorCode = graphQLException.errorCode(); + message = graphQLException.getMessage(); } - if (exception.getCause() instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception.getCause()).errorCode(); - message = exception.getCause().getMessage(); + if (illException == null && graphQLException == null) { + log.error("Failed to execute", exception); } - DataHubGraphQLError error = new DataHubGraphQLError(message, path, sourceLocation, errorCode); - return DataFetcherExceptionHandlerResult.newResult().error(error).build(); + return CompletableFuture.completedFuture( + 
DataFetcherExceptionHandlerResult.newResult().error(error).build()); + } + + T findFirstThrowableCauseOfClass(Throwable throwable, Class clazz) { + while (throwable != null) { + if (clazz.isInstance(throwable)) { + return (T) throwable; + } else { + throwable = throwable.getCause(); + } + } + return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 07bd1fba5d8a86..8bc716f4ff4db5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -12,8 +12,11 @@ public class FeatureFlags { private boolean readOnlyModeEnabled = false; private boolean showSearchFiltersV2 = false; private boolean showBrowseV2 = false; + private boolean platformBrowseV2 = false; private PreProcessHooks preProcessHooks; private boolean showAcrylInfo = false; + private boolean erModelRelationshipFeatureEnabled = false; private boolean showAccessManagement = false; private boolean nestedDomainsEnabled = false; + private boolean schemaFieldEntityFetchEnabled = false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java deleted file mode 100644 index 9faf00e0211bda..00000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.linkedin.datahub.graphql.resolvers; - -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import 
com.linkedin.metadata.authorization.PoliciesConfig; -import java.util.List; -import java.util.Optional; - -public class AuthUtils { - - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - - public static boolean isAuthorized( - String principal, List privilegeGroup, Authorizer authorizer) { - for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = - new AuthorizationRequest(principal, privilege, Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - if (AuthorizationResult.Type.DENY.equals(result.getType())) { - return false; - } - } - return true; - } - - private AuthUtils() {} -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 5ab07701c15a23..3126f25546f658 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -28,8 +28,9 @@ public static CompletableFuture> batchLoadEntitiesOfSameType( .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); - List keyList = new ArrayList(); + final DataLoader loader = + dataLoaderRegistry.getDataLoader(filteredEntity.name()); + List keyList = new ArrayList(); for (Entity entity : entities) { keyList.add(filteredEntity.getKeyProvider().apply(entity)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index b480e287adb9bc..30817d1c621529 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.datahub.authorization.AuthUtil.isAuthorized; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.ApiGroup.ANALYTICS; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -22,7 +24,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Collections; -import java.util.Optional; +import java.util.List; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; @@ -59,7 +61,7 @@ public CompletableFuture get(DataFetchingEnvironment environm null, context.getAuthentication()) .get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); + final CorpUser corpUser = CorpUserMapper.map(context, gmsUser, _featureFlags); // 2. Get platform privileges final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); @@ -98,22 +100,22 @@ public CompletableFuture get(DataFetchingEnvironment environm /** Returns true if the authenticated user has privileges to view analytics. 
*/ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized(context.getActorUrn(), context.getAuthorizer(), ANALYTICS, READ); } /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } /** Returns true if the authenticated user has privileges to manage users & groups. */ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), + return isAuthorizedEntityType( context.getActorUrn(), - PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } /** Returns true if the authenticated user has privileges to generate personal access tokens */ @@ -155,15 +157,4 @@ private boolean canManageUserCredentials(@Nonnull QueryContext context) { context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - - /** - * Returns true if the provided actor is authorized for a particular privilege, false otherwise. 
- */ - private boolean isAuthorized( - final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = - new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - return AuthorizationResult.Type.ALLOW.equals(result.getType()); - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index 2a074b950d0ff5..005fa6de22adf4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -70,7 +70,9 @@ public CompletableFuture get(DataFetchingEnvironment e // Step 2: Bind profiles into GraphQL strong types. List runEvents = - aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(a -> AssertionRunEventMapper.map(context, a)) + .collect(Collectors.toList()); // Step 3: Package and return response. 
final AssertionRunEventsResult result = new AssertionRunEventsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 89912b2814e400..b1924ef025c2d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -8,7 +10,6 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -24,10 +25,10 @@ public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteAssertionResolver( - final EntityClient entityClient, final EntityService entityService) { + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -41,7 +42,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) () -> { // 1. check the entity exists. If not, return false. 
- if (!_entityService.exists(assertionUrn)) { + if (!_entityService.exists(assertionUrn, true)) { return true; } @@ -104,7 +105,7 @@ private boolean isAuthorizedToDeleteAssertionFromAssertee( final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index 9814589df76514..261c1592d3f96d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -4,6 +4,7 @@ import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Entity; @@ -25,8 +26,10 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; /** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. 
*/ +@Slf4j public class EntityAssertionsResolver implements DataFetcher> { @@ -49,6 +52,8 @@ public CompletableFuture get(DataFetchingEnvironment env final String entityUrn = ((Entity) environment.getSource()).getUrn(); final Integer start = environment.getArgumentOrDefault("start", 0); final Integer count = environment.getArgumentOrDefault("count", 200); + final Boolean includeSoftDeleted = + environment.getArgumentOrDefault("includeSoftDeleted", false); try { // Step 1: Fetch set of assertions associated with the target entity from the Graph @@ -83,7 +88,8 @@ public CompletableFuture get(DataFetchingEnvironment env final List assertions = gmsResults.stream() .filter(Objects::nonNull) - .map(AssertionMapper::map) + .map(r -> AssertionMapper.map(context, r)) + .filter(assertion -> assertionExists(assertion, includeSoftDeleted, context)) .collect(Collectors.toList()); // Step 4: Package and return result @@ -98,4 +104,17 @@ public CompletableFuture get(DataFetchingEnvironment env } }); } + + private boolean assertionExists( + Assertion assertion, Boolean includeSoftDeleted, QueryContext context) { + try { + return _entityClient.exists( + UrnUtils.getUrn(assertion.getUrn()), includeSoftDeleted, context.getAuthentication()); + } catch (RemoteInvocationException e) { + log.error( + String.format("Unable to check if assertion %s exists, ignoring it", assertion.getUrn()), + e); + return false; + } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java new file mode 100644 index 00000000000000..c3e14565e0e27f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java @@ -0,0 +1,59 @@ +package com.linkedin.datahub.graphql.resolvers.auth; + +import com.datahub.authentication.token.StatefulTokenService; 
+import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.types.auth.AccessTokenMetadataType; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class GetAccessTokenMetadataResolver + implements DataFetcher> { + + private final StatefulTokenService _tokenService; + private final EntityClient _entityClient; + + public GetAccessTokenMetadataResolver( + final StatefulTokenService tokenService, EntityClient entityClient) { + _tokenService = tokenService; + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String token = environment.getArgument("token"); + log.info("User {} requesting access token metadata information.", context.getActorUrn()); + if (!AuthorizationUtils.canManageTokens(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + AccessTokenMetadataType metadataType = new AccessTokenMetadataType(_entityClient); + final String tokenHash = _tokenService.hash(token); + final String tokenUrn = _tokenService.tokenUrnFromKey(tokenHash).toString(); + try { + List> batchLoad = + metadataType.batchLoad(ImmutableList.of(tokenUrn), context); + if (batchLoad.isEmpty()) { + return null; + } + return batchLoad.get(0).getData(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index 5cfa80e394c5ff..83789ec488e64c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchResult; @@ -65,14 +64,15 @@ public CompletableFuture get(DataFetchingEnvironment envi .setOrder(SortOrder.DESCENDING); final SearchResult searchResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.ACCESS_TOKEN_ENTITY_NAME, "", buildFilter(filters, Collections.emptyList()), sortCriterion, start, - count, - getAuthentication(environment), - new SearchFlags().setFulltext(true)); + count); final List tokens = searchResult.getEntities().stream() diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 292d6108b7a044..5443525e52cf13 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -2,21 +2,25 @@ import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResultsV2; import com.linkedin.datahub.graphql.generated.BrowseV2Input; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResultV2; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -26,6 +30,7 
@@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -35,6 +40,7 @@ public class BrowseV2Resolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final BrowseV2Input input = bindArgument(environment.getArgument("input"), BrowseV2Input.class); - final String entityName = EntityTypeMapper.getName(input.getType()); + final List entityNames = getEntityNames(input); final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; final String query = input.getQuery() != null ? input.getQuery() : "*"; + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); @@ -66,28 +73,41 @@ public CompletableFuture get(DataFetchingEnvironment environmen ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, + context.getOperationContext().withSearchFlags(flags -> searchFlags), + entityNames, pathStr, maybeResolvedView != null ? 
SearchUtils.combineFilters( - filter, maybeResolvedView.getDefinition().getFilter()) - : filter, + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, sanitizedQuery, start, - count, - context.getAuthentication()); - return mapBrowseResults(browseResults); + count); + return mapBrowseResults(context, browseResults); } catch (Exception e) { throw new RuntimeException("Failed to execute browse V2", e); } }); } - private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { + public static List getEntityNames(BrowseV2Input input) { + List entityTypes; + if (input.getTypes() != null && input.getTypes().size() > 0) { + entityTypes = input.getTypes(); + } else if (input.getType() != null) { + entityTypes = ImmutableList.of(input.getType()); + } else { + entityTypes = BROWSE_ENTITY_TYPES; + } + return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } + + private BrowseResultsV2 mapBrowseResults( + @Nullable QueryContext context, BrowseResultV2 browseResults) { BrowseResultsV2 results = new BrowseResultsV2(); results.setTotal(browseResults.getNumGroups()); results.setStart(browseResults.getFrom()); @@ -103,7 +123,7 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { browseGroup.setCount(group.getCount()); browseGroup.setHasSubGroups(group.isHasSubGroups()); if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + browseGroup.setEntity(UrnToEntityMapper.map(context, group.getUrn())); } groups.add(browseGroup); }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index a2d04a26bfa97d..68468f195a5af0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -1,14 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.chart; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChartStatsSummary; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -16,12 +12,9 @@ public class ChartStatsSummaryResolver implements DataFetcher> { private final TimeseriesAspectService timeseriesAspectService; - private final Cache summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = - CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build(); } @Override diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index 34f7f133f6fb94..d884afb36a280a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -126,9 +126,15 @@ public CompletableFuture get(final DataFetchingEnvironment environmen appConfig.setAuthConfig(authConfig); final VisualConfig visualConfig = new VisualConfig(); - if (_visualConfiguration != null && _visualConfiguration.getAssets() != null) { - visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); - 
visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + if (_visualConfiguration != null) { + if (_visualConfiguration.getAssets() != null) { + visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); + visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + } + if (_visualConfiguration.getAppTitle() != null) { + visualConfig.setAppTitle(_visualConfiguration.getAppTitle()); + } + visualConfig.setHideGlossary(_visualConfiguration.isHideGlossary()); } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); @@ -173,8 +179,11 @@ public CompletableFuture get(final DataFetchingEnvironment environmen .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) .setShowBrowseV2(_featureFlags.isShowBrowseV2()) .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setErModelRelationshipFeatureEnabled( + _featureFlags.isErModelRelationshipFeatureEnabled()) .setShowAccessManagement(_featureFlags.isShowAccessManagement()) .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2()) .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -255,6 +264,10 @@ private EntityType mapResourceTypeToEntityType(final String resourceType) { .getResourceType() .equals(resourceType)) { return EntityType.CORP_USER; + } else if (com.linkedin.metadata.authorization.PoliciesConfig.ER_MODEL_RELATIONSHIP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { + return EntityType.ER_MODEL_RELATIONSHIP; } else { return null; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 58f7715c3e627e..f9cb75052dcc44 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -78,7 +78,9 @@ public CompletableFuture get(final DataFetchingEnvironment enviro .setValue(urn); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( + context.getOperationContext(), CONTAINABLE_ENTITY_NAMES, query, new Filter() @@ -90,8 +92,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro start, count, null, - null, - context.getAuthentication())); + null)); } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 9502fb8e5cb931..82fbc8ad412689 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -48,7 +48,7 @@ private void aggregateParentContainers( _entityClient.getV2( containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { - Container mappedContainer = ContainerMapper.map(response); + Container mappedContainer = ContainerMapper.map(context, response); containers.add(mappedContainer); aggregateParentContainers(containers, mappedContainer.getUrn(), context); } @@ -70,8 +70,11 @@ public CompletableFuture get(DataFetchingEnvironment env try { aggregateParentContainers(containers, urn, context); final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); + + List viewable = new ArrayList<>(containers); + + 
result.setCount(viewable.size()); + result.setContainers(viewable); return result; } catch (DataHubGraphQLException e) { throw new RuntimeException("Failed to load all containers", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index b5480359bde6a5..717360e58e184b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,11 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.isViewDatasetUsageAuthorized; import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; @@ -17,8 +17,8 @@ import graphql.schema.DataFetchingEnvironment; import java.util.List; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -29,39 +29,38 @@ public class DashboardStatsSummaryResolver private static final Integer MAX_TOP_USERS = 5; private final TimeseriesAspectService timeseriesAspectService; - private final Cache summaryCache; public DashboardStatsSummaryResolver(final TimeseriesAspectService 
timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = - CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite( - 6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); } @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); + final QueryContext context = environment.getContext(); return CompletableFuture.supplyAsync( () -> { - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - try { + // TODO: We don't have a dashboard specific priv + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { + log.debug( + "User {} is not authorized to view usage information for {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + final DashboardStatsSummary result = new DashboardStatsSummary(); // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); + result.setViewCount(getDashboardViewCount(context, resourceUrn)); } // Obtain unique user statistics, by rolling up unique users over the past month. 
@@ -73,7 +72,6 @@ public CompletableFuture get(DataFetchingEnvironment envi .map(DashboardUserUsageCounts::getUser) .collect(Collectors.toList()))); - this.summaryCache.put(resourceUrn, result); return result; } catch (Exception e) { @@ -87,10 +85,10 @@ public CompletableFuture get(DataFetchingEnvironment envi }); } - private int getDashboardViewCount(final Urn resourceUrn) { + private int getDashboardViewCount(@Nullable QueryContext context, final Urn resourceUrn) { List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } @@ -102,7 +100,7 @@ private List getDashboardUsagePerUser(final Urn resour return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } - private List trimUsers(final List originalUsers) { + private static List trimUsers(final List originalUsers) { if (originalUsers.size() > MAX_TOP_USERS) { return originalUsers.subList(0, MAX_TOP_USERS); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 07d028b07b01d3..2dd4654e88466f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -4,6 +4,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUsageQueryResult; 
@@ -26,6 +27,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -46,6 +48,7 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); @@ -70,14 +73,18 @@ public CompletableFuture get(DataFetchingEnvironment // Absolute usage metrics List dashboardUsageMetrics = getDashboardUsageMetrics( - dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + context, dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); usageQueryResult.setMetrics(dashboardUsageMetrics); return usageQueryResult; }); } private List getDashboardUsageMetrics( - String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { + @Nullable QueryContext context, + String dashboardUrn, + Long maybeStartTimeMillis, + Long maybeEndTimeMillis, + Integer maybeLimit) { List dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -104,7 +111,9 @@ private List getDashboardUsageMetrics( maybeLimit, filter); dashboardUsageMetrics = - aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(a -> DashboardUsageMetricMapper.map(context, a)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 4f170a296c47e1..e54ae184c91ff0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregationMetrics; @@ -31,6 +32,7 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class DashboardUsageStatsUtils { @@ -40,6 +42,7 @@ public class DashboardUsageStatsUtils { public static final String ES_NULL_VALUE = "NULL"; public static List getDashboardUsageMetrics( + @Nullable QueryContext context, String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, @@ -58,7 +61,9 @@ public static List getDashboardUsageMetrics( maybeLimit, filter); dashboardUsageMetrics = - aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(m -> DashboardUsageMetricMapper.map(context, m)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index 10c487a839f358..a6d6e9debccec7 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -47,6 +47,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm try { final Urn dataProductUrn = _dataProductService.createDataProduct( + input.getId(), input.getProperties().getName(), input.getProperties().getDescription(), authentication); @@ -55,7 +56,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); if (response != null) { - return DataProductMapper.map(response); + return DataProductMapper.map(context, response); } // should never happen log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index a0f1698bf99e82..ee99adc01fccbd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -12,9 +12,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.dataproduct.DataProductAssociation; import com.linkedin.dataproduct.DataProductProperties; @@ -134,10 +134,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); + } else { + searchFlags = null; } try { @@ -150,15 +152,17 @@ public CompletableFuture get(DataFetchingEnvironment environment) count); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags), finalEntityNames, sanitizedQuery, finalFilter, start, count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); + null)); } catch (Exception e) { log.error( "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 304ef96d90aa51..71d562048a8e9e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -62,7 +62,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); if (response != null) { - return DataProductMapper.map(response); + return DataProductMapper.map(context, response); } // should never happen log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 74fbd9c2c868a4..37ab6ac4575b42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -1,8 +1,5 @@ package com.linkedin.datahub.graphql.resolvers.dataset; -import com.datahub.authorization.EntitySpec; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; 
import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -10,7 +7,6 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.usage.UsageClient; import com.linkedin.usage.UsageTimeRange; import com.linkedin.usage.UserUsageCounts; @@ -18,9 +14,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -36,16 +30,9 @@ public class DatasetStatsSummaryResolver private static final Integer MAX_TOP_USERS = 5; private final UsageClient usageClient; - private final Cache summaryCache; public DatasetStatsSummaryResolver(final UsageClient usageClient) { this.usageClient = usageClient; - this.summaryCache = - CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite( - 6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. 
- .build(); } @Override @@ -56,13 +43,8 @@ public CompletableFuture get(DataFetchingEnvironment enviro return CompletableFuture.supplyAsync( () -> { - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - try { - - if (!isAuthorized(resourceUrn, context)) { + if (!AuthorizationUtils.isViewDatasetUsageAuthorized(context, resourceUrn)) { log.debug( "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), @@ -88,7 +70,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro createPartialUser(Objects.requireNonNull(userCounts.getUser()))) .collect(Collectors.toList()))); } - this.summaryCache.put(resourceUrn, result); + return result; } catch (Exception e) { log.error( @@ -112,11 +94,4 @@ private CorpUser createPartialUser(final Urn userUrn) { result.setUrn(userUrn.toString()); return result; } - - private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized( - context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index 75288ec989c79d..b2348eb2d8f4ce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -1,19 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.dataset; -import com.datahub.authorization.EntitySpec; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.isViewDatasetUsageAuthorized; + import com.linkedin.common.urn.Urn; import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.UsageQueryResult; import com.linkedin.datahub.graphql.types.usage.UsageQueryResultMapper; -import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.usage.UsageClient; import com.linkedin.usage.UsageTimeRange; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; @@ -35,7 +34,7 @@ public CompletableFuture get(DataFetchingEnvironment environme return CompletableFuture.supplyAsync( () -> { - if (!isAuthorized(resourceUrn, context)) { + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { log.debug( "User {} is not authorized to view usage information for dataset {}", context.getActorUrn(), @@ -45,18 +44,13 @@ public CompletableFuture get(DataFetchingEnvironment environme try { com.linkedin.usage.UsageQueryResult usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return UsageQueryResultMapper.map(usageQueryResult); + return UsageQueryResultMapper.map(context, usageQueryResult); } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + log.error(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + MetricUtils.counter(this.getClass(), "usage_stats_dropped").inc(); } - }); - } - private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized( - context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + return 
UsageQueryResultMapper.EMPTY; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 62c88c506ba613..ed3eca0b78b605 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.metadata.Constants.*; @@ -13,7 +14,6 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -37,7 +37,7 @@ public class UpdateDeprecationResolver implements DataFetcher _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -50,7 +50,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { + if (!isAuthorizedToUpdateDeprecationForEntity(context, entityUrn)) { throw new AuthorizationException( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } @@ -89,7 +89,7 @@ private boolean isAuthorizedToUpdateDeprecationForEntity( final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); @@ -101,9 +101,10 @@ private boolean isAuthorizedToUpdateDeprecationForEntity( orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUpdateDeprecationInput( + Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index 9099394d32bd0b..19809170aad38c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -2,7 +2,6 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.AuditStamp; @@ -16,7 +15,6 @@ import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.CreateDomainInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import 
com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.domain.DomainProperties; @@ -100,14 +98,8 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists( - UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } OwnerUtils.addCreatorAsOwner( - context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + context, domainUrn, OwnerEntityType.CORP_USER, _entityService); return domainUrn; } catch (DataHubGraphQLException e) { throw e; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 8f6d109e71b2c5..905c992a0b65f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -7,7 +7,7 @@ import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; @@ -85,7 +85,9 @@ public 
CompletableFuture get(final DataFetchingEnvironment enviro } return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( + context.getOperationContext(), SEARCHABLE_ENTITY_TYPES.stream() .map(EntityTypeMapper::getName) .collect(Collectors.toList()), @@ -97,8 +99,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro start, count, null, - null, - context.getAuthentication())); + null)); } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 5453603f4cc9f9..fe4a7f23cfaab6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -62,6 +61,7 @@ public CompletableFuture get(final DataFetchingEnvironment en // First, get all domain Urns. final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DOMAIN_ENTITY_NAME, query, filter, @@ -69,9 +69,7 @@ public CompletableFuture get(final DataFetchingEnvironment en .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Now that we have entities we can bind this to a result. 
final ListDomainsResult result = new ListDomainsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index 8406e198104689..3478cffb032b20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; import com.linkedin.common.urn.Urn; @@ -16,6 +17,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class ParentDomainsResolver implements DataFetcher> { @@ -50,9 +52,20 @@ public CompletableFuture get(DataFetchingEnvironment enviro Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + List viewable = + parentDomains.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + urn)) + .collect(Collectors.toList()); + final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); + result.setCount(viewable.size()); + result.setDomains(viewable); return result; } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 1c52f707c61a4a..4d2e93be42fcba 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -28,7 +28,7 @@ public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -74,16 +74,16 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } public static Boolean validateSetDomainInput( - Urn entityUrn, Urn domainUrn, EntityService entityService) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); } - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to add Entity %s to Domain %s. 
Entity does not exist.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index b2a82ac7608d89..c415d933e4a3a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -29,7 +29,7 @@ public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -71,9 +71,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw }); } - public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(entityUrn, true)) { throw new IllegalArgumentException( String.format("Failed to add Entity %s to Domain %s. 
Entity does not exist.", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index e1b264606074c6..caaf76b0d1dc81 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -82,7 +82,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw */ private static void validateUpdateEmbedInput( @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { - if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { + if (!entityService.exists(UrnUtils.getUrn(input.getUrn()), true)) { throw new IllegalArgumentException( String.format( "Failed to update embed for entity with urn %s. Entity does not exist!", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index d2bd2f3fb8a17d..257f0a4efd2601 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -12,9 +12,9 @@ /** Resolver responsible for returning whether an entity exists. 
*/ public class EntityExistsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; - public EntityExistsResolver(final EntityService entityService) { + public EntityExistsResolver(final EntityService entityService) { _entityService = entityService; } @@ -32,7 +32,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { try { - return _entityService.exists(entityUrn); + return _entityService.exists(entityUrn, true); } catch (Exception e) { throw new RuntimeException( String.format("Failed to check whether entity %s exists", entityUrn.toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index 751c6096de1a2d..82a9b6a939e6d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.entity; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + +import com.datahub.authorization.AuthUtil; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,11 +15,11 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; -import java.util.Collections; +import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -52,15 +54,18 @@ public CompletableFuture get(DataFetchingEnvironment environme return getDataJobPrivileges(urn, context); default: log.warn( - "Tried to get entity privileges for entity type {} but nothing is implemented for it yet", + "Tried to get entity privileges for entity type {}. Adding common privileges only.", urn.getEntityType()); - return new EntityPrivileges(); + EntityPrivileges commonPrivileges = new EntityPrivileges(); + addCommonPrivileges(commonPrivileges, urn, context); + return commonPrivileges; } }); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); + addCommonPrivileges(result, termUrn, context); result.setCanManageEntity(false); if (GlossaryUtils.canManageGlossaries(context)) { result.setCanManageEntity(true); @@ -77,6 +82,7 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); + addCommonPrivileges(result, nodeUrn, context); result.setCanManageEntity(false); if (GlossaryUtils.canManageGlossaries(context)) { result.setCanManageEntity(true); @@ -97,49 +103,41 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - DisjunctivePrivilegeGroup orPrivilegesGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup( - 
Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - urn.getEntityType(), - urn.toString(), - orPrivilegesGroup); + return AuthUtil.isAuthorizedUrns( + context.getAuthorizer(), context.getActorUrn(), LINEAGE, UPDATE, List.of(urn)); } private EntityPrivileges getDatasetPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); result.setCanEditQueries(AuthorizationUtils.canCreateQuery(ImmutableList.of(urn), context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getChartPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getDashboardPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getDataJobPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); + addCommonPrivileges(result, urn, context); return result; } + + private void addCommonPrivileges( + @Nonnull EntityPrivileges result, @Nonnull Urn urn, @Nonnull QueryContext context) { + result.setCanEditLineage(canEditEntityLineage(urn, context)); + 
result.setCanEditProperties(AuthorizationUtils.canEditProperties(urn, context)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java new file mode 100644 index 00000000000000..39c9210c289e1d --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java @@ -0,0 +1,52 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchAssignFormResolver implements DataFetcher> { + + private final FormService _formService; + + public BatchAssignFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () 
-> { + try { + _formService.batchAssignFormToEntities( + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn, + authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java new file mode 100644 index 00000000000000..a6590625a9ba3c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java @@ -0,0 +1,54 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchRemoveFormResolver implements DataFetcher> { + + private final FormService _formService; + + public BatchRemoveFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), 
BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + // TODO: (PRD-1062) Add permission check once permission exists + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.batchUnassignFormForEntities( + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn, + authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java new file mode 100644 index 00000000000000..5b5f058dbdeac2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java @@ -0,0 +1,50 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class CreateDynamicFormAssignmentResolver + implements DataFetcher> { + + private final FormService 
_formService; + + public CreateDynamicFormAssignmentResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final CreateDynamicFormAssignmentInput input = + bindArgument(environment.getArgument("input"), CreateDynamicFormAssignmentInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final DynamicFormAssignment formAssignment = FormUtils.mapDynamicFormAssignment(input); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + try { + _formService.createDynamicFormAssignment(formAssignment, formUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java new file mode 100644 index 00000000000000..e7bf87ae7644ea --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java @@ -0,0 +1,80 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; +import 
java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IsFormAssignedToMeResolver implements DataFetcher> { + + private final GroupService _groupService; + + public IsFormAssignedToMeResolver(@Nonnull final GroupService groupService) { + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final FormActorAssignment parent = environment.getSource(); + + return CompletableFuture.supplyAsync( + () -> { + try { + + // Assign urn and group urns + final Set assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(CorpUser::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set assignedGroupUrns = + parent.getGroups() != null + ? parent.getGroups().stream().map(CorpGroup::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. + if (assignedGroupUrns.size() > 0) { + final List groupUrns = + _groupService.getGroupsForUser(userUrn, context.getAuthentication()); + boolean isUserGroupAssigned = + groupUrns.stream() + .anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + } catch (Exception e) { + log.error( + "Failed to determine whether the form is assigned to the currently authenticated user! Returning false.", + e); + } + + // Else the user is not directly assigned. 
+ return false; + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java new file mode 100644 index 00000000000000..5b40c353b3809c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java @@ -0,0 +1,89 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.metadata.service.FormService; +import com.linkedin.structured.PrimitivePropertyValueArray; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class SubmitFormPromptResolver implements DataFetcher> { + + private final FormService _formService; + + public SubmitFormPromptResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final Urn entityUrn = UrnUtils.getUrn(environment.getArgument("urn")); + final SubmitFormPromptInput input = + bindArgument(environment.getArgument("input"), SubmitFormPromptInput.class); + final String promptId = input.getPromptId(); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final 
String fieldPath = input.getFieldPath(); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (input.getType().equals(FormPromptType.STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + context.getAuthentication()); + } else if (input.getType().equals(FormPromptType.FIELDS_STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + if (fieldPath == null) { + throw new IllegalArgumentException( + "Failed to provide fieldPath for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitFieldStructuredPropertyPromptResponse( + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + fieldPath, + context.getAuthentication()); + } + return false; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java new file mode 100644 index 
00000000000000..54e3562c97addf --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java @@ -0,0 +1,63 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class VerifyFormResolver implements DataFetcher> { + + private final FormService _formService; + private final GroupService _groupService; + + public VerifyFormResolver( + @Nonnull final FormService formService, @Nonnull final GroupService groupService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final VerifyFormInput input = + bindArgument(environment.getArgument("input"), VerifyFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + final Authentication authentication = context.getAuthentication(); + final Urn actorUrn = UrnUtils.getUrn(authentication.getActor().toUrnStr()); + + return CompletableFuture.supplyAsync( + () -> { + try { + 
final List groupsForUser = + _groupService.getGroupsForUser(actorUrn, authentication); + if (!_formService.isFormAssignedToUser( + formUrn, entityUrn, actorUrn, groupsForUser, authentication)) { + throw new AuthorizationException( + String.format( + "Failed to authorize form on entity as form with urn %s is not assigned to user", + formUrn)); + } + _formService.verifyFormForEntity(formUrn, entityUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 535dbbf70a4cbc..b6e8899a6a4540 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -12,6 +12,7 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; @@ -29,7 +30,8 @@ @RequiredArgsConstructor public class AddRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,13 +39,14 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); 
final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); return CompletableFuture.supplyAsync( () -> { - if (GlossaryUtils.canManageGlossaries(context)) { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { try { final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); final List termUrns = input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); validateRelatedTermsInput(urn, termUrns); @@ -91,7 +94,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); @@ -104,7 +107,7 @@ public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); - } else if (!_entityService.exists(termUrn)) { + } else if (!_entityService.exists(termUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", urn, termUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index 815b4662e1ed24..b52153d70fa7b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -2,7 +2,6 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -13,7 +12,6 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; @@ -39,7 +37,6 @@ public class CreateGlossaryNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); @@ -72,19 +69,8 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if 
(!_entityService.exists( - UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - OwnerUtils.addCreatorAsOwner( - context, - glossaryNodeUrn, - OwnerEntityType.CORP_USER, - ownershipType, - _entityService); + context, glossaryNodeUrn, OwnerEntityType.CORP_USER, _entityService); return glossaryNodeUrn; } catch (Exception e) { log.error( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index 90979fe918f71a..54fd7ef1fe04d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -2,7 +2,6 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -14,7 +13,6 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.EntityResponse; @@ -88,19 +86,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws String glossaryTermUrn = _entityClient.ingestProposal(proposal, 
context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists( - UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } OwnerUtils.addCreatorAsOwner( - context, - glossaryTermUrn, - OwnerEntityType.CORP_USER, - ownershipType, - _entityService); + context, glossaryTermUrn, OwnerEntityType.CORP_USER, _entityService); return glossaryTermUrn; } catch (Exception e) { log.error( @@ -152,7 +140,7 @@ private Map getTermsWithSameParent(Urn parentNode, QueryCon final Filter filter = buildParentNodeFilter(parentNode); final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); + context.getOperationContext(), GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000); final List termUrns = searchResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index f623f0e34b3669..3dc3e93260665e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -15,10 +15,10 @@ public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; public DeleteGlossaryEntityResolver( - final EntityClient entityClient, EntityService entityService) { + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @@ -33,7 
+33,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) return CompletableFuture.supplyAsync( () -> { if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { + if (!_entityService.exists(entityUrn, true)) { throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index e7990b1a343d83..700a38d50b317d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -53,12 +53,12 @@ public CompletableFuture get( final Filter filter = buildGlossaryEntitiesFilter(); final SearchResult gmsNodesResult = _entityClient.filter( + context.getOperationContext(), Constants.GLOSSARY_NODE_ENTITY_NAME, filter, null, start, - count, - context.getAuthentication()); + count); final List glossaryNodeUrns = gmsNodesResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index 40e4363dcff938..9669d406344e71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -53,12 +53,12 @@ public CompletableFuture get( final Filter filter = buildGlossaryEntitiesFilter(); final SearchResult gmsTermsResult = 
_entityClient.filter( + context.getOperationContext(), Constants.GLOSSARY_TERM_ENTITY_NAME, filter, null, start, - count, - context.getAuthentication()); + count); final List glossaryTermUrns = gmsTermsResult.getEntities().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index 850469f9965157..1ac5ced304dbe7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -23,6 +25,7 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class ParentNodesResolver implements DataFetcher> { @@ -53,7 +56,7 @@ private void aggregateParentNodes(List nodes, String urn, QueryCon _entityClient.getV2( parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); nodes.add(mappedNode); aggregateParentNodes(nodes, 
mappedNode.getUrn(), context); } @@ -85,7 +88,7 @@ private GlossaryNode getTermParentNode(String urn, QueryContext context) { _entityClient.getV2( parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); return mappedNode; } } @@ -117,9 +120,20 @@ public CompletableFuture get(DataFetchingEnvironment environm aggregateParentNodes(nodes, urn, context); } + List viewable = + nodes.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + UrnUtils.getUrn(urn))) + .collect(Collectors.toList()); + final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); + result.setCount(viewable.size()); + result.setNodes(viewable); return result; } catch (DataHubGraphQLException | URISyntaxException e) { throw new RuntimeException(("Failed to load parent nodes")); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 8c9b792b74e0dd..09181c08c0af5e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -11,6 +11,7 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; @@ -27,7 +28,8 @@ @RequiredArgsConstructor public class RemoveRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -35,18 +37,19 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); return CompletableFuture.supplyAsync( () -> { - if (GlossaryUtils.canManageGlossaries(context)) { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { try { final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); final List termUrnsToRemove = input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || !_entityService.exists(urn)) { + || !_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format( "Failed to update %s. 
%s either does not exist or is not a glossaryTerm.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index e487ee00608d4a..cde7d4958a25ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -43,9 +43,10 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Group key. final CorpGroupKey key = new CorpGroupKey(); final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + final String description = input.getDescription() != null ? input.getDescription() : ""; key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". return _groupService.createNativeGroup( - key, input.getName(), input.getDescription(), authentication); + key, input.getName(), description, authentication); } catch (Exception e) { throw new RuntimeException("Failed to create group", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index 93582fb956bd8d..8abe2378982930 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -6,7 +6,7 @@ import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; import com.linkedin.datahub.graphql.generated.EntityCountResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import 
com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -41,10 +41,10 @@ public CompletableFuture get(final DataFetchingEnvironment e // First, get all counts Map gmsResult = _entityClient.batchGetTotalEntityCount( + context.getOperationContext(), input.getTypes().stream() .map(EntityTypeMapper::getName) - .collect(Collectors.toList()), - context.getAuthentication()); + .collect(Collectors.toList())); // bind to a result. List resultList = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index a6ad8698679f02..70be478d65c5c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.generated.ListGroupsResult; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -58,6 +57,9 @@ public CompletableFuture get(final DataFetchingEnvironment env // First, get all group Urns. 
final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), CORP_GROUP_ENTITY_NAME, query, null, @@ -65,9 +67,7 @@ public CompletableFuture get(final DataFetchingEnvironment env .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get hydrate all groups. final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java new file mode 100644 index 00000000000000..ad36621d20c668 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java @@ -0,0 +1,319 @@ +package com.linkedin.datahub.graphql.resolvers.health; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationships; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Health; +import com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.generated.HealthStatusType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import 
com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.timeseries.AggregationSpec; +import com.linkedin.timeseries.AggregationType; +import com.linkedin.timeseries.GenericTable; +import com.linkedin.timeseries.GroupingBucket; +import com.linkedin.timeseries.GroupingBucketType; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.extern.slf4j.Slf4j; + +/** + * Resolver for generating the health badge for an asset, which depends on + * + *

1. Assertions status - whether the asset has active assertions 2. Incidents status - whether + * the asset has active incidents + */ +@Slf4j +public class EntityHealthResolver implements DataFetcher>> { + private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; + private static final String ASSERTION_RUN_EVENT_SUCCESS_TYPE = "SUCCESS"; + private static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + private static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + + private final EntityClient _entityClient; + private final GraphClient _graphClient; + private final TimeseriesAspectService _timeseriesAspectService; + + private final Config _config; + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService) { + this(entityClient, graphClient, timeseriesAspectService, new Config(true, true)); + } + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService, + @Nonnull final Config config) { + _entityClient = entityClient; + _graphClient = graphClient; + _timeseriesAspectService = timeseriesAspectService; + _config = config; + } + + @Override + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { + final Entity parent = environment.getSource(); + return CompletableFuture.supplyAsync( + () -> { + try { + final HealthStatuses statuses = + computeHealthStatusForAsset(parent.getUrn(), environment.getContext()); + return statuses.healths; + } catch (Exception e) { + throw new RuntimeException("Failed to resolve asset's health status.", e); + } + }); + } + + /** + * Computes the "resolved health status" for an asset by + * + *

- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing + */ + private HealthStatuses computeHealthStatusForAsset( + final String entityUrn, final QueryContext context) { + final List healthStatuses = new ArrayList<>(); + + if (_config.getIncidentsEnabled()) { + final Health incidentsHealth = computeIncidentsHealthForAsset(entityUrn, context); + if (incidentsHealth != null) { + healthStatuses.add(incidentsHealth); + } + } + + if (_config.getAssertionsEnabled()) { + final Health assertionsHealth = computeAssertionHealthForAsset(entityUrn, context); + if (assertionsHealth != null) { + healthStatuses.add(assertionsHealth); + } + } + + return new HealthStatuses(healthStatuses); + } + + /** + * Returns the resolved "incidents health", which is currently a static function of whether there + * are any active incidents open on an asset + * + * @param entityUrn the asset to compute health for + * @param context the query context + * @return an instance of {@link Health} for the entity, null if one cannot be computed. + */ + private Health computeIncidentsHealthForAsset( + final String entityUrn, final QueryContext context) { + try { + final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString()); + final SearchResult searchResult = + _entityClient.filter( + context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1); + final Integer activeIncidentCount = searchResult.getNumEntities(); + if (activeIncidentCount > 0) { + // There are active incidents. + return new Health( + HealthStatusType.INCIDENTS, + HealthStatus.FAIL, + String.format( + "%s active incident%s", activeIncidentCount, activeIncidentCount > 1 ? "s" : ""), + ImmutableList.of("ACTIVE_INCIDENTS")); + } + // Report pass if there are no active incidents. 
+ return new Health(HealthStatusType.INCIDENTS, HealthStatus.PASS, null, null); + } catch (RemoteInvocationException e) { + log.error("Failed to compute incident health status!", e); + return null; + } + } + + private Filter buildIncidentsEntityFilter(final String entityUrn, final String state) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, state); + return QueryUtils.newFilter(criterionMap); + } + + /** + * TODO: Replace this with the assertions summary aspect. + * + *

Returns the resolved "assertions health", which is currently a static function of whether + * the most recent run of all asset assertions has succeeded. + * + * @param entityUrn the entity to compute health for + * @param context the query context + * @return an instance of {@link Health} for the asset, null if one cannot be computed. + */ + @Nullable + private Health computeAssertionHealthForAsset( + final String entityUrn, final QueryContext context) { + // Get active assertion urns + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); + + if (relationships.getTotal() > 0) { + + // If there are assertions defined, then we should return a non-null health for this asset. + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); + + final GenericTable assertionRunResults = getAssertionRunsTable(entityUrn); + + if (!assertionRunResults.hasRows() || assertionRunResults.getRows().size() == 0) { + // No assertion run results found. Return empty health! + return null; + } + + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + + // Finally compute & return the health. 
+ final Health health = new Health(); + health.setType(HealthStatusType.ASSERTIONS); + if (failingAssertionUrns.size() > 0) { + health.setStatus(HealthStatus.FAIL); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); + health.setCauses(failingAssertionUrns); + } else { + health.setStatus(HealthStatus.PASS); + health.setMessage("All assertions are passing"); + } + return health; + } + return null; + } + + private GenericTable getAssertionRunsTable(final String asserteeUrn) { + return _timeseriesAspectService.getAggregatedStats( + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + createAssertionAggregationSpecs(), + createAssertionsFilter(asserteeUrn), + createAssertionGroupingBuckets()); + } + + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + // Create the buckets based on the result + return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); + } + + private Filter createAssertionsFilter(final String datasetUrn) { + final Filter filter = new Filter(); + final ArrayList criteria = new ArrayList<>(); + + // Add filter for asserteeUrn == datasetUrn + Criterion datasetUrnCriterion = + new Criterion().setField("asserteeUrn").setCondition(Condition.EQUAL).setValue(datasetUrn); + criteria.add(datasetUrnCriterion); + + // Add filter for result == result + Criterion startTimeCriterion = + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + criteria.add(startTimeCriterion); + + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + return filter; + } + + private AggregationSpec[] createAssertionAggregationSpecs() { + // Simply fetch the timestamp, result type for the assertion URN. 
+ AggregationSpec resultTypeAggregation = + new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); + AggregationSpec timestampAggregation = + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; + } + + private GroupingBucket[] createAssertionGroupingBuckets() { + // String grouping bucket on "assertionUrn" + GroupingBucket assertionUrnBucket = new GroupingBucket(); + assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); + return new GroupingBucket[] {assertionUrnBucket}; + } + + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { + final List failedAssertionUrns = new ArrayList<>(); + for (StringArray row : rows) { + // Result structure should be assertionUrn, event.result.type, timestampMillis + if (row.size() != 3) { + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); + } + + final String assertionUrn = row.get(0); + final String resultType = row.get(1); + + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. 
+ if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + failedAssertionUrns.add(assertionUrn); + } + } + return failedAssertionUrns; + } + + @Data + @AllArgsConstructor + public static class Config { + private Boolean assertionsEnabled; + private Boolean incidentsEnabled; + } + + @AllArgsConstructor + private static class HealthStatuses { + private final List healths; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java new file mode 100644 index 00000000000000..089582b66836fe --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -0,0 +1,124 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.types.incident.IncidentMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import 
java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. */ +public class EntityIncidentsResolver + implements DataFetcher> { + + static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; + + private final EntityClient _entityClient; + + public EntityIncidentsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final Optional maybeState = Optional.ofNullable(environment.getArgument("state")); + + try { + // Step 1: Fetch set of incidents associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildIncidentsEntityFilter(entityUrn, maybeState); + final SortCriterion sortCriterion = buildIncidentsSortCriterion(); + final SearchResult searchResult = + _entityClient.filter( + context.getOperationContext(), + Constants.INCIDENT_ENTITY_NAME, + filter, + sortCriterion, + start, + count); + + final List incidentUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + null, + context.getAuthentication()); + + // Step 3: Map GMS incident model to GraphQL model + final List entityResult = new ArrayList<>(); + for (Urn urn : incidentUrns) { + entityResult.add(entities.getOrDefault(urn, null)); + } + final List incidents = + entityResult.stream() + .filter(Objects::nonNull) + .map(i -> IncidentMapper.map(context, i)) + .collect(Collectors.toList()); + + // Step 4: Package and return result + final EntityIncidentsResult result = new EntityIncidentsResult(); + result.setCount(searchResult.getPageSize()); + result.setStart(searchResult.getFrom()); + result.setTotal(searchResult.getNumEntities()); + result.setIncidents(incidents); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); + } + + private Filter buildIncidentsEntityFilter( + final String entityUrn, final Optional maybeState) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + maybeState.ifPresent( + incidentState -> criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, incidentState)); + return QueryUtils.newFilter(criterionMap); + } + + private SortCriterion buildIncidentsSortCriterion() { + final SortCriterion sortCriterion = new SortCriterion(); + 
sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + sortCriterion.setOrder(SortOrder.DESCENDING); + return sortCriterion; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java new file mode 100644 index 00000000000000..117ee59553d4c8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java @@ -0,0 +1,129 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.RaiseIncidentInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.key.IncidentKey; +import 
com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Resolver used for creating (raising) a new asset incident. */ +@Slf4j +@RequiredArgsConstructor +public class RaiseIncidentResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final RaiseIncidentInput input = + bindArgument(environment.getArgument("input"), RaiseIncidentInput.class); + final Urn resourceUrn = Urn.createFromString(input.getResourceUrn()); + + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorizedToCreateIncidentForResource(resourceUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final IncidentKey key = new IncidentKey(); + + // Generate a random UUID for the incident + final String id = UUID.randomUUID().toString(); + key.setId(id); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + INCIDENT_ENTITY_NAME, + INCIDENT_INFO_ASPECT_NAME, + mapIncidentInfo(input, context)); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + log.error("Failed to create incident. 
{}", e.getMessage()); + throw new RuntimeException("Failed to incident", e); + } + }); + } + + private IncidentInfo mapIncidentInfo(final RaiseIncidentInput input, final QueryContext context) + throws URISyntaxException { + final IncidentInfo result = new IncidentInfo(); + result.setType( + IncidentType.valueOf( + input + .getType() + .name())); // Assumption Alert: This assumes that GMS incident type === GraphQL + // incident type. + result.setCustomType(input.getCustomType(), SetMode.IGNORE_NULL); + result.setTitle(input.getTitle(), SetMode.IGNORE_NULL); + result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + result.setEntities( + new UrnArray(ImmutableList.of(Urn.createFromString(input.getResourceUrn())))); + result.setCreated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + // Create the incident in the 'active' state by default. + result.setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + result.setSource(new IncidentSource().setType(IncidentSourceType.MANUAL), SetMode.IGNORE_NULL); + result.setPriority(input.getPriority(), SetMode.IGNORE_NULL); + return result; + } + + private boolean isAuthorizedToCreateIncidentForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java new file mode 100644 index 00000000000000..c88847c13accd5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java @@ -0,0 +1,105 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.UpdateIncidentStatusInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import 
java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; + +/** GraphQL Resolver that updates an incident's status */ +@RequiredArgsConstructor +public class UpdateIncidentStatusResolver implements DataFetcher> { + + private final EntityClient _entityClient; + private final EntityService _entityService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final Urn incidentUrn = Urn.createFromString(environment.getArgument("urn")); + final UpdateIncidentStatusInput input = + bindArgument(environment.getArgument("input"), UpdateIncidentStatusInput.class); + return CompletableFuture.supplyAsync( + () -> { + + // Check whether the incident exists. + IncidentInfo info = + (IncidentInfo) + EntityUtils.getAspectFromEntity( + incidentUrn.toString(), INCIDENT_INFO_ASPECT_NAME, _entityService, null); + + if (info != null) { + // Check whether the actor has permission to edit the incident + // Currently only supporting a single entity. TODO: Support multiple incident entities. + final Urn resourceUrn = info.getEntities().get(0); + if (isAuthorizedToUpdateIncident(resourceUrn, context)) { + info.setStatus( + new IncidentStatus() + .setState(IncidentState.valueOf(input.getState().name())) + .setLastUpdated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + if (input.getMessage() != null) { + info.getStatus().setMessage(input.getMessage()); + } + try { + // Finally, create the MetadataChangeProposal. 
+ final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + incidentUrn, INCIDENT_INFO_ASPECT_NAME, info); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to update incident status!", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + throw new DataHubGraphQLException( + "Failed to update incident. Incident does not exist.", + DataHubGraphQLErrorCode.NOT_FOUND); + }); + } + + private boolean isAuthorizedToUpdateIncident(final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 036780d4467013..24d0e946145054 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,29 +1,28 @@ package com.linkedin.datahub.graphql.resolvers.ingest; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; +import static com.linkedin.metadata.Constants.INGESTION_SOURCE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.SECRETS_ENTITY_NAME; +import static 
com.linkedin.metadata.authorization.ApiOperation.MANAGE; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, - ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), - authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(INGESTION_SOURCE_ENTITY_NAME)); } public static boolean canManageSecrets(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(SECRETS_ENTITY_NAME)); } private IngestionAuthUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index ffa9dcf42d176f..800a41330346a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.ExecutionRequest; import com.linkedin.datahub.graphql.generated.IngestionConfig; import com.linkedin.datahub.graphql.generated.IngestionSchedule; @@ -23,21 +24,23 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j public class IngestionResolverUtils { public static List mapExecutionRequests( - final Collection requests) { + @Nullable QueryContext context, final Collection requests) { List result = new ArrayList<>(); for (final EntityResponse request : requests) { - result.add(mapExecutionRequest(request)); + result.add(mapExecutionRequest(context, request)); } return result; } - public static ExecutionRequest mapExecutionRequest(final EntityResponse entityResponse) { + public static ExecutionRequest mapExecutionRequest( + @Nullable QueryContext context, final EntityResponse entityResponse) { final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -59,7 +62,7 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe inputResult.setSource(mapExecutionRequestSource(executionRequestInput.getSource())); } if (executionRequestInput.hasArgs()) { - inputResult.setArguments(StringMapMapper.map(executionRequestInput.getArgs())); + inputResult.setArguments(StringMapMapper.map(context, executionRequestInput.getArgs())); } inputResult.setRequestedAt(executionRequestInput.getRequestedAt()); result.setInput(inputResult); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 722ffe3aba6b84..4975b036399278 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -58,7 +58,7 @@ public CompletableFuture get(final DataFetchingEnvironment env DataHubGraphQLErrorCode.NOT_FOUND); } // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + return IngestionResolverUtils.mapExecutionRequest(context, entities.get(urn)); } catch (Exception e) { throw new RuntimeException("Failed to retrieve execution request", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index 01100a24d6b15c..e3e4b692a45da2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -67,6 +67,7 @@ public CompletableFuture get( final SearchResult executionsSearchResult = _entityClient.filter( + context.getOperationContext(), Constants.EXECUTION_REQUEST_ENTITY_NAME, new Filter() .setOr( @@ -78,8 +79,7 @@ public CompletableFuture get( .setField(REQUEST_TIME_MS_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication()); + count); // 2. 
Batch fetch the related ExecutionRequests final Set relatedExecRequests = @@ -103,6 +103,7 @@ public CompletableFuture get( result.setTotal(executionsSearchResult.getNumEntities()); result.setExecutionRequests( IngestionResolverUtils.mapExecutionRequests( + context, executionsSearchResult.getEntities().stream() .map(searchResult -> entities.get(searchResult.getEntity())) .filter(Objects::nonNull) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 0b909dee513747..3e9583824a5680 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -44,7 +44,8 @@ public CompletableFuture rollbackIngestion( return CompletableFuture.supplyAsync( () -> { try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); + _entityClient.rollbackIngestion( + runId, context.getAuthorizer(), context.getAuthentication()); return true; } catch (Exception e) { throw new RuntimeException("Failed to rollback ingestion execution", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index 577780e53ce862..750e048e39ce66 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -6,19 +6,19 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import 
com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateSecretInput; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.concurrent.CompletableFuture; /** @@ -58,14 +58,15 @@ public CompletableFuture get(final DataFetchingEnvironment environment) } // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated( - new AuditStamp() - .setActor(UrnUtils.getUrn(context.getActorUrn())) - .setTime(System.currentTimeMillis())); + final DataHubSecretValue value = + DataHubSecretValueMapper.map( + null, + input.getName(), + _secretService.encrypt(input.getValue()), + input.getDescription(), + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 67564aa721bdac..0205601d5ccdaa 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -13,10 +13,10 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import java.util.Map; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index eb054295af09b3..b3c7db20f45374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -17,7 +17,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -67,6 +66,9 @@ public CompletableFuture get(final DataFetchingEnvironment en // First, get all secrets final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.SECRETS_ENTITY_NAME, query, null, @@ -74,9 +76,7 @@ public CompletableFuture get(final DataFetchingEnvironment en 
.setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); + // Then, resolve all secrets + final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java new file mode 100644 index 00000000000000..e3ad036159060e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java @@ -0,0 +1,82 @@ +package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Updates an encrypted DataHub secret. Uses AES symmetric encryption / decryption.
Requires the + * MANAGE_SECRETS privilege. + */ +@Slf4j +@RequiredArgsConstructor +public class UpdateSecretResolver implements DataFetcher> { + private final EntityClient entityClient; + private final SecretService secretService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final UpdateSecretInput input = + bindArgument(environment.getArgument("input"), UpdateSecretInput.class); + final Urn secretUrn = Urn.createFromString(input.getUrn()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { + + try { + EntityResponse response = + entityClient.getV2( + secretUrn.getEntityType(), + secretUrn, + Set.of(SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + if (!entityClient.exists(secretUrn, context.getAuthentication()) + || response == null) { + throw new IllegalArgumentException( + String.format("Secret for urn %s doesn't exist!", secretUrn)); + } + + DataHubSecretValue updatedVal = + DataHubSecretValueMapper.map( + response, + input.getName(), + secretService.encrypt(input.getValue()), + input.getDescription(), + null); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + secretUrn, SECRET_VALUE_ASPECT_NAME, updatedVal); + return entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update a secret with urn %s and name %s", + secretUrn, input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action.
Please contact your DataHub administrator."); + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index 51c9e30aadcce1..d2387820ca7ab7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -14,7 +14,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -63,14 +62,15 @@ public CompletableFuture get( // First, get all ingestion sources Urns. 
final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.INGESTION_SOURCE_ENTITY_NAME, query, buildFilter(filters, Collections.emptyList()), null, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, resolve all ingestion sources final Map entities = diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index 06bad27e270620..64f8f083a164ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -63,12 +63,12 @@ public CompletableFuture get(DataFetchingEnvironment final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); final SearchResult gmsResult = _entityClient.filter( + context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, sortCriterion, start, - count, - context.getAuthentication()); + count); final List dataProcessInstanceUrns = gmsResult.getEntities().stream() .map(SearchEntity::getEntity) @@ -90,7 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment final List dataProcessInstances = gmsResults.stream() .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) + .map(p -> DataProcessInstanceMapper.map(context, p)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index d595b1e513d75e..660484cc27e154 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -69,12 +69,12 @@ public CompletableFuture get(DataFetchingEnvironment final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); final SearchResult gmsResult = _entityClient.filter( + context.getOperationContext(), Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, filter, sortCriterion, start, - count, - context.getAuthentication()); + count); final List dataProcessInstanceUrns = gmsResult.getEntities().stream() .map(SearchEntity::getEntity) @@ -96,7 +96,7 @@ public CompletableFuture get(DataFetchingEnvironment final List dataProcessInstances = gmsResults.stream() .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) + .map(p -> DataProcessInstanceMapper.map(context, p)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index a0caef20a4755e..020688765cc53e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,10 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static com.datahub.authorization.AuthUtil.buildDisjunctivePrivilegeGroup; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; -import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import 
com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -13,13 +14,11 @@ import com.linkedin.datahub.graphql.generated.LineageEdge; import com.linkedin.datahub.graphql.generated.UpdateLineageInput; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -35,7 +34,7 @@ @RequiredArgsConstructor public class UpdateLineageResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final LineageService _lineageService; @Override @@ -60,9 +59,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { + final Set existingDownstreamUrns = _entityService.exists(downstreamUrns, true); + // build MCP for every downstreamUrn for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { + if (!existingDownstreamUrns.contains(downstreamUrn)) { throw new IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); @@ -128,9 +129,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + final Set existingUpstreamUrns = _entityService.exists(upstreamUrns, true); + // build MCP for upstreamUrn if necessary for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { + if (!existingUpstreamUrns.contains(upstreamUrn)) { throw new 
IllegalArgumentException( String.format( "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); @@ -240,15 +243,9 @@ private void checkPrivileges( @Nonnull final QueryContext context, @Nonnull final List edgesToAdd, @Nonnull final List edgesToRemove) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup( - Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); + buildDisjunctivePrivilegeGroup(LINEAGE, UPDATE, null); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index ecf36769dfa9f8..2519d91aa3a84d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -3,16 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; +import graphql.execution.DataFetcherResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class BatchGetEntitiesResolver 
implements DataFetcher>> { private final List> _entityTypes; @@ -30,13 +34,21 @@ public CompletableFuture> get(DataFetchingEnvironment environment) final List entities = _entitiesProvider.apply(environment); Map> entityTypeToEntities = new HashMap<>(); - entities.forEach( - (entity) -> { - EntityType type = entity.getType(); - List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + Map> entityIndexMap = new HashMap<>(); + int index = 0; + for (Entity entity : entities) { + List indexList = new ArrayList<>(); + if (entityIndexMap.containsKey(entity.getUrn())) { + indexList = entityIndexMap.get(entity.getUrn()); + } + indexList.add(index); + entityIndexMap.put(entity.getUrn(), indexList); + index++; + EntityType type = entity.getType(); + List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); + entitiesList.add(entity); + entityTypeToEntities.put(type, entitiesList); + } List>> entitiesFutures = new ArrayList<>(); @@ -49,9 +61,32 @@ public CompletableFuture> get(DataFetchingEnvironment environment) return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) .thenApply( - v -> - entitiesFutures.stream() - .flatMap(future -> future.join().stream()) - .collect(Collectors.toList())); + v -> { + Entity[] finalEntityList = new Entity[entities.size()]; + // Returned objects can be either of type Entity or wrapped as + // DataFetcherResult + // Therefore we need to be working with raw Objects in this area of the code + List returnedList = + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList()); + for (Object element : returnedList) { + Entity entity = null; + if (element instanceof DataFetcherResult) { + entity = ((DataFetcherResult) element).getData(); + } else if (element instanceof Entity) { + entity = (Entity) element; + } else { + throw new RuntimeException( + 
String.format( + "Cannot process entity because it is neither an Entity nor a DataFetcherResult. %s", + element)); + } + for (int idx : entityIndexMap.get(entity.getUrn())) { + finalEntityList[idx] = entity; + } + } + return Arrays.asList(finalEntityList); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index c63ec819e8f6a1..8de18ec01e6dc2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -2,18 +2,27 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.datahub.authorization.AuthorizationConfiguration; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.LineageInput; import com.linkedin.datahub.graphql.generated.LineageRelationship; +import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.graph.SiblingGraphService; +import com.linkedin.metadata.query.LineageFlags; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; +import io.datahubproject.metadata.services.RestrictedService; import java.util.HashSet;
+import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; @@ -29,14 +38,22 @@ public class EntityLineageResultResolver implements DataFetcher> { private final SiblingGraphService _siblingGraphService; + private final RestrictedService _restrictedService; + private final AuthorizationConfiguration _authorizationConfiguration; - public EntityLineageResultResolver(final SiblingGraphService siblingGraphService) { + public EntityLineageResultResolver( + final SiblingGraphService siblingGraphService, + final RestrictedService restrictedService, + final AuthorizationConfiguration authorizationConfiguration) { _siblingGraphService = siblingGraphService; + _restrictedService = restrictedService; + _authorizationConfiguration = authorizationConfiguration; } @Override public CompletableFuture get(DataFetchingEnvironment environment) { - final String urn = ((Entity) environment.getSource()).getUrn(); + final QueryContext context = environment.getContext(); + Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); @@ -49,29 +66,48 @@ public CompletableFuture get(DataFetchingEnvironment enviro com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); + final Urn finalUrn = urn; return CompletableFuture.supplyAsync( () -> { try { - return mapEntityRelationships( + com.linkedin.metadata.graph.EntityLineageResult entityLineageResult = _siblingGraphService.getLineage( - Urn.createFromString(urn), + finalUrn, resolvedDirection, start != null ? start : 0, count != null ? count : 100, 1, separateSiblings != null ? 
input.getSeparateSiblings() : false, new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); + new LineageFlags() + .setStartTimeMillis(startTimeMillis, SetMode.REMOVE_IF_NULL) + .setEndTimeMillis(endTimeMillis, SetMode.REMOVE_IF_NULL)); + + Set restrictedUrns = new HashSet<>(); + entityLineageResult + .getRelationships() + .forEach( + rel -> { + if (_authorizationConfiguration.getView().isEnabled() + && !AuthorizationUtils.canViewRelationship( + context.getOperationContext(), rel.getEntity(), urn)) { + restrictedUrns.add(rel.getEntity()); + } + }); + + return mapEntityRelationships(context, entityLineageResult, restrictedUrns); + } catch (Exception e) { + log.error("Failed to fetch lineage for {}", finalUrn); + throw new RuntimeException( + String.format("Failed to fetch lineage for %s", finalUrn), e); } }); } private EntityLineageResult mapEntityRelationships( - final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult) { + @Nullable final QueryContext context, + final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult, + final Set restrictedUrns) { final EntityLineageResult result = new EntityLineageResult(); result.setStart(entityLineageResult.getStart()); result.setCount(entityLineageResult.getCount()); @@ -79,17 +115,29 @@ private EntityLineageResult mapEntityRelationships( result.setFiltered(entityLineageResult.getFiltered()); result.setRelationships( entityLineageResult.getRelationships().stream() - .map(this::mapEntityRelationship) + .map(r -> mapEntityRelationship(context, r, restrictedUrns)) .collect(Collectors.toList())); return result; } private LineageRelationship mapEntityRelationship( - final com.linkedin.metadata.graph.LineageRelationship lineageRelationship) { + @Nullable final QueryContext context, + final 
com.linkedin.metadata.graph.LineageRelationship lineageRelationship, + final Set restrictedUrns) { final LineageRelationship result = new LineageRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(lineageRelationship.getEntity()); - if (partialEntity != null) { - result.setEntity(partialEntity); + if (restrictedUrns.contains(lineageRelationship.getEntity())) { + final Restricted restrictedEntity = new Restricted(); + restrictedEntity.setType(EntityType.RESTRICTED); + String restrictedUrnString = + _restrictedService.encryptRestrictedUrn(lineageRelationship.getEntity()).toString(); + + restrictedEntity.setUrn(restrictedUrnString); + result.setEntity(restrictedEntity); + } else { + final Entity partialEntity = UrnToEntityMapper.map(context, lineageRelationship.getEntity()); + if (partialEntity != null) { + result.setEntity(partialEntity); + } } result.setType(lineageRelationship.getType()); result.setDegree(lineageRelationship.getDegree()); @@ -98,14 +146,14 @@ private LineageRelationship mapEntityRelationship( } if (lineageRelationship.hasCreatedActor()) { final Urn createdActor = lineageRelationship.getCreatedActor(); - result.setCreatedActor(UrnToEntityMapper.map(createdActor)); + result.setCreatedActor(UrnToEntityMapper.map(context, createdActor)); } if (lineageRelationship.hasUpdatedOn()) { result.setUpdatedOn(lineageRelationship.getUpdatedOn()); } if (lineageRelationship.hasUpdatedActor()) { final Urn updatedActor = lineageRelationship.getUpdatedActor(); - result.setUpdatedActor(UrnToEntityMapper.map(updatedActor)); + result.setUpdatedActor(UrnToEntityMapper.map(context, updatedActor)); } result.setIsManual(lineageRelationship.hasIsManual() && lineageRelationship.isIsManual()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 
223548d5d62427..f3edbf8a3a7374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import com.linkedin.common.EntityRelationship; @@ -17,6 +18,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. @@ -47,6 +49,7 @@ public CompletableFuture get(DataFetchingEnvironment return CompletableFuture.supplyAsync( () -> mapEntityRelationships( + context, fetchEntityRelationships( urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), resolvedDirection)); @@ -64,17 +67,28 @@ private EntityRelationships fetchEntityRelationships( } private EntityRelationshipsResult mapEntityRelationships( + @Nullable final QueryContext context, final EntityRelationships entityRelationships, final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); + + List viewable = + entityRelationships.getRelationships().stream() + .filter( + rel -> context == null || canView(context.getOperationContext(), rel.getEntity())) + .collect(Collectors.toList()); + result.setStart(entityRelationships.getStart()); - result.setCount(entityRelationships.getCount()); - result.setTotal(entityRelationships.getTotal()); + result.setCount(viewable.size()); + // TODO fix the calculation at the graph call + result.setTotal( + entityRelationships.getTotal() - (entityRelationships.getCount() - viewable.size())); 
result.setRelationships( - entityRelationships.getRelationships().stream() + viewable.stream() .map( entityRelationship -> mapEntityRelationship( + context, com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( relationshipDirection.name()), entityRelationship)) @@ -83,18 +97,19 @@ private EntityRelationshipsResult mapEntityRelationships( } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( + @Nullable final QueryContext context, final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(entityRelationship.getEntity()); + final Entity partialEntity = UrnToEntityMapper.map(context, entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); } result.setType(entityRelationship.getType()); result.setDirection(direction); if (entityRelationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(entityRelationship.getCreated())); + result.setCreated(AuditStampMapper.map(context, entityRelationship.getCreated())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index 0d00823697c25b..f233209cda67ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -2,9 +2,9 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.EntitySpec; import 
com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FilterInput; import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; @@ -22,9 +22,8 @@ import graphql.schema.DataFetchingEnvironment; import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.function.Function; +import java.util.function.BiFunction; import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -48,14 +47,14 @@ public class TimeSeriesAspectResolver private final EntityClient _client; private final String _entityName; private final String _aspectName; - private final Function _aspectMapper; + private final BiFunction _aspectMapper; private final SortCriterion _sort; public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper) { + final BiFunction aspectMapper) { this(client, entityName, aspectName, aspectMapper, null); } @@ -63,7 +62,7 @@ public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper, + final BiFunction aspectMapper, final SortCriterion sort) { _client = client; _entityName = entityName; @@ -76,10 +75,11 @@ public TimeSeriesAspectResolver( private boolean isAuthorized(QueryContext context, String urn) { if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized( - context, - Optional.of(new EntitySpec(_entityName, urn)), - PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE, + new EntitySpec(_entityName, 
urn)); } return true; } @@ -123,7 +123,9 @@ public CompletableFuture> get(DataFetchingEnvironment env context.getAuthentication()); // Step 2: Bind profiles into GraphQL strong types. - return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + return aspects.stream() + .map(a -> _aspectMapper.apply(context, a)) + .collect(Collectors.toList()); } catch (RemoteInvocationException e) { throw new RuntimeException("Failed to retrieve aspects from GMS", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 9c0d009ff9b0e8..d1ea81fab083c3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -6,7 +6,6 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AddOwnerInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -40,10 +39,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } OwnerInput ownerInput = ownerInputBuilder.build(); - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(environment.getContext(), targetUrn); return CompletableFuture.supplyAsync( () -> { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index c64b2403364c8e..96500f23303f87 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -6,7 +6,6 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AddOwnersInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -34,10 +33,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(environment.getContext(), targetUrn); OwnerUtils.validateAddOwnerInput(owners, targetUrn, _entityService); try { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 94182835de159a..4d57031954e31a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -5,7 +5,6 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchAddOwnersInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -74,10 +73,7 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte "Malformed input provided: owners cannot be applied to subresources."); } - if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(context, resourceUrn); LabelUtils.validateResource( resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index 30e04ac36ee0f7..c0996b07fb9616 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -5,7 +5,6 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchRemoveOwnersInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.LabelUtils; @@ -14,7 +13,6 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; @@ -32,10 +30,10 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List owners = input.getOwnerUrns(); final List resources = input.getResources(); - final Optional maybeOwnershipTypeUrn = + final Urn ownershipTypeUrn = input.getOwnershipTypeUrn() == null - ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + ? 
null + : Urn.createFromString(input.getOwnershipTypeUrn()); final QueryContext context = environment.getContext(); return CompletableFuture.supplyAsync( @@ -46,7 +44,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw try { // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); + batchRemoveOwners(owners, ownershipTypeUrn, resources, context); return true; } catch (Exception e) { log.error( @@ -71,24 +69,21 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte "Malformed input provided: owners cannot be removed from subresources."); } - if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(context, resourceUrn); LabelUtils.validateResource( resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } private void batchRemoveOwners( List ownerUrns, - Optional maybeOwnershipTypeUrn, + Urn ownershipTypeUrn, List resources, QueryContext context) { log.debug("Batch removing owners. 
owners: {}, resources: {}", ownerUrns, resources); try { OwnerUtils.removeOwnersFromResources( ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), - maybeOwnershipTypeUrn, + ownershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 5a25e6d83e648a..aa7c1b152790f1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -20,7 +20,7 @@ @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -65,7 +65,7 @@ private void validateInputUrn(String urnStr, QueryContext context) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - if (!_entityService.exists(urn)) { + if (!_entityService.exists(urn, true)) { throw new IllegalArgumentException( String.format("Failed to soft delete entity with urn %s. 
Entity does not exist.", urn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index d0796389d22808..ab151d6244f489 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -44,7 +44,7 @@ public static void updateFieldDescription( Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService) { + EntityService entityService) { EditableSchemaMetadata editableSchemaMetadata = (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( @@ -66,7 +66,7 @@ public static void updateFieldDescription( } public static void updateContainerDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = (EditableContainerProperties) EntityUtils.getAspectFromEntity( @@ -84,7 +84,7 @@ public static void updateContainerDescription( } public static void updateDomainDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( @@ -107,7 +107,7 @@ public static void updateDomainDescription( } public static void updateTagDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( @@ -123,7 +123,7 @@ public static void updateTagDescription( } public static void 
updateCorpGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( @@ -143,7 +143,7 @@ public static void updateCorpGroupDescription( } public static void updateGlossaryTermDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( @@ -168,7 +168,7 @@ public static void updateGlossaryTermDescription( } public static void updateGlossaryNodeDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( @@ -189,7 +189,7 @@ public static void updateGlossaryNodeDescription( } public static void updateNotebookDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( @@ -212,8 +212,8 @@ public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", resourceUrn, resourceUrn)); } @@ -223,8 +223,8 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -232,8 +232,8 @@ public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityS return true; } - public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } @@ -241,24 +241,24 @@ public static Boolean validateContainerInput(Urn resourceUrn, EntityService enti return true; } - public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { - if (!entityService.exists(corpUserUrn)) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { + if (!entityService.exists(corpUserUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { - if (!entityService.exists(notebookUrn)) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { + if (!entityService.exists(notebookUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", notebookUrn, notebookUrn)); } @@ -335,7 +335,7 @@ public static boolean isAuthorizedToUpdateDescription( } public static void updateMlModelDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( @@ -355,7 +355,7 @@ public static void updateMlModelDescription( } public static void updateMlModelGroupDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( @@ -375,7 +375,7 @@ public static void updateMlModelGroupDescription( } public static void updateMlFeatureDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( @@ -395,7 +395,7 @@ public static void updateMlFeatureDescription( } public static void updateMlFeatureTableDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLFeatureTableProperties 
editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( @@ -415,7 +415,7 @@ public static void updateMlFeatureTableDescription( } public static void updateMlPrimaryKeyDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( @@ -435,7 +435,7 @@ public static void updateMlPrimaryKeyDescription( } public static void updateDataProductDescription( - String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e4c5c132be4f7d..dab8cfffd54e3d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -27,7 +27,7 @@ @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -67,7 +67,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { throw new IllegalArgumentException("Parent entity is not a domain."); } - if (!_entityService.exists(newParentDomainUrn)) { + if (!_entityService.exists(newParentDomainUrn, true)) { throw new 
IllegalArgumentException("Parent entity does not exist."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java index 9827aa0666d19b..ec62a951573e2a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java @@ -6,14 +6,12 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RemoveOwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -31,15 +29,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - Optional maybeOwnershipTypeUrn = + Urn ownershipTypeUrn = input.getOwnershipTypeUrn() == null - ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + ? null + : Urn.createFromString(input.getOwnershipTypeUrn()); - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(environment.getContext(), targetUrn); return CompletableFuture.supplyAsync( () -> { @@ -50,7 +45,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw ((QueryContext) environment.getContext()).getActorUrn()); OwnerUtils.removeOwnersFromResources( ImmutableList.of(ownerUrn), - maybeOwnershipTypeUrn, + ownershipTypeUrn, ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), actor, _entityService); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index dd44c2718b3a43..8e4a96637e04dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -35,7 +35,7 @@ @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -47,7 +47,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return CompletableFuture.supplyAsync( () -> { - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", targetUrn, targetUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 848118e6cc0f6f..2fcec54978b4f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -26,7 +26,7 @@ @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override @@ -37,7 +37,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); - if (!_entityService.exists(targetUrn)) { + if (!_entityService.exists(targetUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", targetUrn, targetUrn)); } @@ -45,7 +45,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) + if (!_entityService.exists(parentNodeUrn, true) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { throw new IllegalArgumentException( String.format( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 3fffe9fa019e7b..7f9be9eb2706cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,17 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.mxe.MetadataChangeProposal; @@ -22,26 +19,12 @@ @Slf4j public class DeleteUtils { - private static final 
ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static void updateStatusForResources( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index fb88d6c29f6621..8c0a3ef7a95a69 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -77,7 +77,7 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService) + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -87,7 +87,10 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + @Nullable Urn domainUrn, + ResourceRefInput resource, + Urn actor, + EntityService entityService) { Domains domains = (Domains) EntityUtils.getAspectFromEntity( @@ 
-104,8 +107,8 @@ private static MetadataChangeProposal buildSetDomainProposal( UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } - public static void validateDomain(Urn domainUrn, EntityService entityService) { - if (!entityService.exists(domainUrn)) { + public static void validateDomain(Urn domainUrn, EntityService entityService) { + if (!entityService.exists(domainUrn, true)) { throw new IllegalArgumentException( String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); } @@ -209,7 +212,7 @@ public static boolean hasChildDomains( // Limit count to 1 for existence check final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); + context.getOperationContext(), DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1); return (searchResult.getNumEntities() > 0); } @@ -223,7 +226,7 @@ private static Map getDomainsByNameAndParent( final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); + context.getOperationContext(), DOMAIN_ENTITY_NAME, filter, null, 0, 1000); final Set domainUrns = searchResult.getEntities().stream() @@ -285,7 +288,7 @@ public static Entity getParentDomain( new DomainProperties( entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); - return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; + return parentDomainUrn != null ? 
UrnToEntityMapper.map(context, parentDomainUrn) : null; } } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java new file mode 100644 index 00000000000000..9a06682c87f78f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java @@ -0,0 +1,97 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormInfo; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.structured.PrimitivePropertyValueArray; +import java.util.Objects; +import javax.annotation.Nonnull; + +public class FormUtils { + + private static final String COMPLETED_FORMS = "completedForms"; + private static final String INCOMPLETE_FORMS = "incompleteForms"; + private static final String VERIFIED_FORMS = "verifiedForms"; + private static final String OWNERS = "owners"; + private static final String COMPLETED_FORMS_COMPLETED_PROMPT_IDS = + "completedFormsCompletedPromptIds"; + private static final String INCOMPLETE_FORMS_COMPLETED_PROMPT_IDS = + "incompleteFormsCompletedPromptIds"; + + private FormUtils() {} + + public static PrimitivePropertyValueArray getStructuredPropertyValuesFromInput( 
+ @Nonnull final SubmitFormPromptInput input) { + final PrimitivePropertyValueArray values = new PrimitivePropertyValueArray(); + + input + .getStructuredPropertyParams() + .getValues() + .forEach(value -> values.add(StructuredPropertyUtils.mapPropertyValueInput(value))); + + return values; + } + + /** Map a GraphQL CreateDynamicFormAssignmentInput to the GMS DynamicFormAssignment aspect */ + @Nonnull + public static DynamicFormAssignment mapDynamicFormAssignment( + @Nonnull final CreateDynamicFormAssignmentInput input) { + Objects.requireNonNull(input, "input must not be null"); + + final DynamicFormAssignment result = new DynamicFormAssignment(); + final Filter filter = + new Filter() + .setOr(ResolverUtils.buildConjunctiveCriterionArrayWithOr(input.getOrFilters())); + result.setFilter(filter); + return result; + } + + /** + * Creates a Filter where the provided formUrn is either in completedForms or incompleteForms for + * an entity + */ + private static Filter generateCompleteOrIncompleteFilter(@Nonnull final String formUrn) + throws Exception { + final CriterionArray completedFormsAndArray = new CriterionArray(); + final CriterionArray incompleteFormsAndArray = new CriterionArray(); + completedFormsAndArray.add(buildFormCriterion(formUrn, COMPLETED_FORMS)); + incompleteFormsAndArray.add(buildFormCriterion(formUrn, INCOMPLETE_FORMS)); + // need this to be an OR not two ANDs + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(completedFormsAndArray), + new ConjunctiveCriterion().setAnd(incompleteFormsAndArray))); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field) { + return buildFormCriterion(formUrn, field, false); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field, final boolean negated) { + return new Criterion() + .setField(field) + .setValue(formUrn) + .setCondition(Condition.EQUAL) + 
.setNegated(negated); + } + + private static boolean isActorExplicitlyAssigned( + @Nonnull final Urn actorUrn, @Nonnull final FormInfo formInfo) { + return (formInfo.getActors().getUsers() != null + && formInfo.getActors().getUsers().stream().anyMatch(user -> user.equals(actorUrn))) + || (formInfo.getActors().getGroups() != null + && formInfo.getActors().getGroups().stream().anyMatch(group -> group.equals(actorUrn))); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 996bd3da120d6f..ea487f3a940905 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,7 +17,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; -import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -32,8 +32,8 @@ private GlossaryUtils() {} * Nodes. 
*/ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index 8765b91f65d9d3..09323fdfc83778 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -42,7 +42,11 @@ public class LabelUtils { private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { + Urn labelUrn, + Urn resourceUrn, + String subResource, + Urn actor, + EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) @@ -80,7 +84,7 @@ public static void removeTermFromResource( } public static void removeTagsFromResources( - List tags, List resources, Urn actor, EntityService entityService) + List tags, List resources, Urn actor, EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -90,7 +94,10 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List tagUrns, List resources, Urn actor, EntityService entityService) + List tagUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -100,7 +107,10 @@ public static void 
addTagsToResources( } public static void removeTermsFromResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -110,7 +120,10 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List termUrns, List resources, Urn actor, EntityService entityService) + List termUrns, + List resources, + Urn actor, + EntityService entityService) throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { @@ -124,7 +137,7 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService) + EntityService entityService) throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = @@ -248,7 +261,7 @@ public static void validateResourceAndLabel( String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, + EntityService entityService, Boolean isRemoving) { for (Urn urn : labelUrns) { validateResourceAndLabel( @@ -263,14 +276,14 @@ public static void validateResourceAndLabel( } public static void validateLabel( - Urn labelUrn, String labelEntityType, EntityService entityService) { + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { throw new IllegalArgumentException( String.format( "Failed to validate label with urn %s. Urn type does not match entity type %s..", labelUrn, labelEntityType)); } - if (!entityService.exists(labelUrn)) { + if (!entityService.exists(labelUrn, true)) { throw new IllegalArgumentException( String.format("Failed to validate label with urn %s. 
Urn does not exist.", labelUrn)); } @@ -281,8 +294,8 @@ public static void validateResource( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); @@ -310,7 +323,7 @@ public static void validateResourceAndLabel( String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, + EntityService entityService, Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); @@ -319,7 +332,7 @@ public static void validateResourceAndLabel( } private static MetadataChangeProposal buildAddTagsProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity @@ -331,7 +344,7 @@ private static MetadataChangeProposal buildAddTagsProposal( } private static MetadataChangeProposal buildRemoveTagsProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity @@ -343,7 +356,7 @@ private static MetadataChangeProposal buildRemoveTagsProposal( } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { 
com.linkedin.common.GlobalTags tags = (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity( @@ -361,7 +374,7 @@ private static MetadataChangeProposal buildRemoveTagsToEntityProposal( } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( @@ -383,7 +396,7 @@ private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { com.linkedin.common.GlobalTags tags = (com.linkedin.common.GlobalTags) @@ -402,7 +415,7 @@ private static MetadataChangeProposal buildAddTagsToEntityProposal( } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) @@ -455,7 +468,7 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) } private static MetadataChangeProposal buildAddTermsProposal( - List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity @@ -467,7 +480,7 @@ private 
static MetadataChangeProposal buildAddTermsProposal( } private static MetadataChangeProposal buildRemoveTermsProposal( - List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity @@ -479,7 +492,7 @@ private static MetadataChangeProposal buildRemoveTermsProposal( } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) @@ -500,7 +513,7 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( - List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) @@ -526,7 +539,7 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlossaryTerms terms = (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( @@ -542,7 +555,7 @@ private static MetadataChangeProposal buildRemoveTermsToEntityProposal( } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List termUrns, 
ResourceRefInput resource, Urn actor, EntityService entityService) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index b93c72edbcfc51..d82b8c17ff1b7e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -28,7 +28,11 @@ public class LinkUtils { private LinkUtils() {} public static void addLink( - String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + String linkUrl, + String linkLabel, + Urn resourceUrn, + Urn actor, + EntityService entityService) { InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( @@ -46,7 +50,7 @@ public static void addLink( } public static void removeLink( - String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( @@ -109,7 +113,7 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, Urn resourceUrn, EntityService entityService) { + String linkUrl, Urn resourceUrn, EntityService entityService) { try { new Url(linkUrl); @@ -120,7 +124,7 @@ public static Boolean validateAddRemoveInput( resourceUrn)); } - if (!entityService.exists(resourceUrn)) { + if (!entityService.exists(resourceUrn, true)) { throw new 
IllegalArgumentException( String.format( "Failed to change institutional memory for resource %s. Resource does not exist.", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 15c3c14c7b8f67..1dd9da97cb2f7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -15,6 +15,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.OwnerEntityType; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.OwnershipType; @@ -26,8 +27,8 @@ import com.linkedin.mxe.MetadataChangeProposal; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; // TODO: Move to consuming from OwnerService @@ -42,22 +43,22 @@ public class OwnerUtils { private OwnerUtils() {} public static void addOwnersToResources( - List owners, - List resources, - Urn actor, + List ownerInputs, + List resourceRefs, + Urn actorUrn, EntityService entityService) { final List changes = new ArrayList<>(); - for (ResourceRefInput resource : resources) { + for (ResourceRefInput resource : resourceRefs) { changes.add( buildAddOwnersProposal( - owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + ownerInputs, UrnUtils.getUrn(resource.getResourceUrn()), actorUrn, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + 
EntityUtils.ingestChangeProposals(changes, entityService, actorUrn, false); } public static void removeOwnersFromResources( List ownerUrns, - Optional maybeOwnershipTypeUrn, + @Nullable Urn ownershipTypeUrn, List resources, Urn actor, EntityService entityService) { @@ -66,7 +67,7 @@ public static void removeOwnersFromResources( changes.add( buildRemoveOwnersProposal( ownerUrns, - maybeOwnershipTypeUrn, + ownershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), actor, entityService)); @@ -75,7 +76,7 @@ public static void removeOwnersFromResources( } static MetadataChangeProposal buildAddOwnersProposal( - List owners, Urn resourceUrn, EntityService entityService) { + List owners, Urn resourceUrn, Urn actor, EntityService entityService) { Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( @@ -83,8 +84,9 @@ static MetadataChangeProposal buildAddOwnersProposal( Constants.OWNERSHIP_ASPECT_NAME, entityService, new Ownership()); + ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); for (OwnerInput input : owners) { - addOwner( + addOwnerToAspect( ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), @@ -96,7 +98,7 @@ static MetadataChangeProposal buildAddOwnersProposal( public static MetadataChangeProposal buildRemoveOwnersProposal( List ownerUrns, - Optional maybeOwnershipTypeUrn, + @Nullable Urn ownershipTypeUrn, Urn resourceUrn, Urn actor, EntityService entityService) { @@ -108,36 +110,19 @@ public static MetadataChangeProposal buildRemoveOwnersProposal( entityService, new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); - removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); + removeOwnersIfExists(ownershipAspect, ownerUrns, ownershipTypeUrn); return buildMetadataChangeProposalWithUrn( resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner( - Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn 
ownershipUrn) { + private static void addOwnerToAspect( + Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipTypeUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } - final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf( - owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); + removeExistingOwnerIfExists(ownerArray, ownerUrn, ownershipTypeUrn); Owner newOwner = new Owner(); @@ -150,49 +135,52 @@ private static void addOwner( : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); - newOwner.setTypeUrn(ownershipUrn); + newOwner.setTypeUrn(ownershipTypeUrn); newOwner.setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)); newOwner.setOwner(ownerUrn); ownerArray.add(newOwner); ownershipAspect.setOwners(ownerArray); } + private static void removeExistingOwnerIfExists( + OwnerArray ownerArray, Urn ownerUrn, Urn ownershipTypeUrn) { + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + return isOwnerEqual(owner, ownerUrn, ownershipTypeUrn); + }); + } + + public static boolean isOwnerEqual( + @Nonnull Owner owner, @Nonnull Urn ownerUrn, @Nullable Urn ownershipTypeUrn) { + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + if (owner.getTypeUrn() != null) { + return 
owner.getTypeUrn().equals(ownershipTypeUrn); + } + if (ownershipTypeUrn == null) { + return true; + } + // Fall back to mapping deprecated type to the new ownership entity + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipTypeUrn.toString()); + } + private static void removeOwnersIfExists( - Ownership ownership, List ownerUrns, Optional maybeOwnershipTypeUrn) { - if (!ownership.hasOwners()) { - ownership.setOwners(new OwnerArray()); + Ownership ownershipAspect, List ownerUrns, Urn ownershipTypeUrn) { + if (!ownershipAspect.hasOwners()) { + ownershipAspect.setOwners(new OwnerArray()); } - OwnerArray ownerArray = ownership.getOwners(); + OwnerArray ownerArray = ownershipAspect.getOwners(); for (Urn ownerUrn : ownerUrns) { - if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf( - owner -> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches - // remove - return mapOwnershipTypeToEntity( - OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); - } else { - ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); - } + removeExistingOwnerIfExists(ownerArray, ownerUrn, ownershipTypeUrn); } } - public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { + public static void validateAuthorizedToUpdateOwners( + @Nonnull QueryContext context, Urn resourceUrn) { final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( @@ -200,40 +188,39 @@ public static boolean 
isAuthorizedToUpdateOwners(@Nonnull QueryContext context, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - resourceUrn.getEntityType(), - resourceUrn.toString(), - orPrivilegeGroups); + boolean authorized = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + if (!authorized) { + throw new AuthorizationException( + "Unauthorized to update owners. Please contact your DataHub administrator."); + } } - public static Boolean validateAddOwnerInput( - List owners, Urn resourceUrn, EntityService entityService) { + public static void validateAddOwnerInput( + List owners, Urn resourceUrn, EntityService entityService) { for (OwnerInput owner : owners) { - boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); - if (!result) { - return false; - } + validateAddOwnerInput(owner, resourceUrn, entityService); } - return true; } - public static Boolean validateAddOwnerInput( - OwnerInput owner, Urn resourceUrn, EntityService entityService) { + public static void validateAddOwnerInput( + OwnerInput owner, Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource %s. 
Resource does not exist.", resourceUrn)); } validateOwner(owner, entityService); - - return true; } - public static void validateOwner(OwnerInput owner, EntityService entityService) { + public static void validateOwner(OwnerInput owner, EntityService entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); @@ -254,7 +241,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) ownerUrn)); } - if (!entityService.exists(ownerUrn)) { + if (!entityService.exists(ownerUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource(s). Owner with urn %s does not exist.", @@ -262,7 +249,7 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) } if (owner.getOwnershipTypeUrn() != null - && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { + && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()), true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource(s). Custom Ownership type with " @@ -277,26 +264,30 @@ public static void validateOwner(OwnerInput owner, EntityService entityService) } } - public static Boolean validateRemoveInput(Urn resourceUrn, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { + public static void validateRemoveInput(Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(resourceUrn, true)) { throw new IllegalArgumentException( String.format( "Failed to change ownership for resource %s. 
Resource does not exist.", resourceUrn)); } - return true; } public static void addCreatorAsOwner( QueryContext context, String urn, OwnerEntityType ownerEntityType, - OwnershipType ownershipType, - EntityService entityService) { + EntityService entityService) { try { Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn()); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())), true)) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); - if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { + if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn), true)) { throw new RuntimeException( String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java new file mode 100644 index 00000000000000..8c4e70fdac6055 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.datahub.graphql.generated.PropertyValueInput; +import com.linkedin.structured.PrimitivePropertyValue; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class StructuredPropertyUtils { + + private StructuredPropertyUtils() {} + + @Nullable + public static PrimitivePropertyValue mapPropertyValueInput( + @Nonnull final PropertyValueInput valueInput) { + if (valueInput.getStringValue() != null) { + return PrimitivePropertyValue.create(valueInput.getStringValue()); + } else if 
(valueInput.getNumberValue() != null) { + return PrimitivePropertyValue.create(valueInput.getNumberValue().doubleValue()); + } + return null; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index abc479ed18ebf8..a51714123057da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,6 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.operation; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; import static com.linkedin.metadata.Constants.*; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 1c8f43a4901737..aec3848f966406 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import 
com.linkedin.metadata.search.SearchEntity; @@ -60,14 +59,13 @@ public CompletableFuture get(DataFetchingEnvironment e final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.OWNERSHIP_TYPE_ENTITY_NAME, query, buildFilter(filters, Collections.emptyList()), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 839121a295d9ac..4a5568619a5cf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -16,6 +16,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -50,7 +51,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro context.getAuthentication(), System.currentTimeMillis()); log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return getOwnershipType(urn, context.getAuthentication()); + return getOwnershipType(context, urn, context.getAuthentication()); } catch (AuthorizationException e) { throw e; } catch (Exception e) { @@ -61,7 +62,9 @@ public CompletableFuture get(DataFetchingEnvironment enviro } private OwnershipTypeEntity getOwnershipType( - @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + @Nullable 
QueryContext context, + @Nonnull final Urn urn, + @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); // If there is no response, there is a problem. @@ -71,6 +74,6 @@ private OwnershipTypeEntity getOwnershipType( "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", urn)); } - return OwnershipTypeMapper.map(maybeResponse); + return OwnershipTypeMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 3328eff2bdf45b..7bfd166b18a205 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -9,7 +9,7 @@ import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetGrantedPrivilegesInput; import com.linkedin.datahub.graphql.generated.Privileges; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index 87832b8c3aa401..4120401e0150f9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -5,17 +5,26 @@ import com.datahub.authorization.PolicyFetcher; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.AndFilterInput; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListPoliciesInput; import com.linkedin.datahub.graphql.generated.ListPoliciesResult; import com.linkedin.datahub.graphql.generated.Policy; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.policy.mappers.PolicyInfoPolicyMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class ListPoliciesResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -40,16 +49,27 @@ public CompletableFuture get(final DataFetchingEnvironment e final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List filters = + input.getOrFilters() != null ? 
input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); + log.debug( + "User {} listing policies with filters {}", context.getActorUrn(), filters.toString()); + + final Filter filter = ResolverUtils.buildFilter(facetFilters, Collections.emptyList()); return _policyFetcher - .fetchPolicies(start, query, count, context.getAuthentication()) + .fetchPolicies(context.getOperationContext(), start, query, count, filter) .thenApply( policyFetchResult -> { final ListPoliciesResult result = new ListPoliciesResult(); result.setStart(start); result.setCount(count); result.setTotal(policyFetchResult.getTotal()); - result.setPolicies(mapEntities(policyFetchResult.getPolicies())); + result.setPolicies(mapEntities(context, policyFetchResult.getPolicies())); return result; }); } @@ -57,11 +77,12 @@ public CompletableFuture get(final DataFetchingEnvironment e "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private List mapEntities(final List policies) { + private static List mapEntities( + @Nullable QueryContext context, final List policies) { return policies.stream() .map( policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); + Policy mappedPolicy = PolicyInfoPolicyMapper.map(context, policy.getPolicyInfo()); mappedPolicy.setUrn(policy.getUrn().toString()); return mappedPolicy; }) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index d0446d218dac6b..7babe63745f727 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,22 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.policy; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, - ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), - authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, 
List.of(POLICY_ENTITY_NAME)); } private PolicyAuthUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index dcdf78ebc15bb1..829aa0024c23a6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -43,7 +43,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Finally, create the MetadataChangeProposal. final MetadataChangeProposal proposal; - final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(input); + final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(context, input); info.setLastUpdatedTimestamp(System.currentTimeMillis()); if (policyUrn.isPresent()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index a350fb91f9d3b8..2eaa08069a688e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; @@ -19,6 +20,7 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** 
* Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link @@ -28,12 +30,13 @@ public class PolicyInfoPolicyMapper implements ModelMapper mapValue(context, v)) .collect(Collectors.toList())) .setCondition( PolicyMatchCondition.valueOf(criterion.getCondition().name())) @@ -108,13 +113,14 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private static PolicyMatchCriterionValue mapValue( + @Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); return PolicyMatchCriterionValue.builder() .setValue(value) - .setEntity(UrnToEntityMapper.map(urn)) + .setEntity(UrnToEntityMapper.map(context, urn)) .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index d82d71295d41b9..0397f764f61bdd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -3,6 +3,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilterInput; import com.linkedin.datahub.graphql.generated.PolicyMatchFilterInput; import com.linkedin.datahub.graphql.generated.PolicyUpdateInput; @@ -18,6 +19,7 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps 
GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. */ public class PolicyUpdateInputInfoMapper @@ -25,12 +27,14 @@ public class PolicyUpdateInputInfoMapper public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); - public static DataHubPolicyInfo map(@Nonnull final PolicyUpdateInput policyInput) { - return INSTANCE.apply(policyInput); + public static DataHubPolicyInfo map( + @Nullable QueryContext context, @Nonnull final PolicyUpdateInput policyInput) { + return INSTANCE.apply(context, policyInput); } @Override - public DataHubPolicyInfo apply(@Nonnull final PolicyUpdateInput policyInput) { + public DataHubPolicyInfo apply( + @Nullable QueryContext queryContext, @Nonnull final PolicyUpdateInput policyInput) { final DataHubPolicyInfo result = new DataHubPolicyInfo(); result.setDescription(policyInput.getDescription()); result.setType(policyInput.getType().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 5292adbe3aac39..49d2a3eff70f0d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.types.post.PostMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -57,14 +56,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all Post Urns. 
final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), POST_ENTITY_NAME, query, null, sortCriterion, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get and hydrate all Posts. final Map entities = @@ -82,7 +80,9 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); result.setPosts( - entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + entities.values().stream() + .map(e -> PostMapper.map(context, e)) + .collect(Collectors.toList())); return result; } catch (Exception e) { throw new RuntimeException("Failed to list posts", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java new file mode 100644 index 00000000000000..582c2842a4909b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java @@ -0,0 +1,70 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import 
com.linkedin.datahub.graphql.generated.UpdatePostInput; +import com.linkedin.post.PostContent; +import graphql.GraphQLException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class UpdatePostResolver implements DataFetcher> { + private final PostService postService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { + throw new AuthorizationException( + "Unauthorized to update posts. Please contact your DataHub administrator if this needs corrective action."); + } + + final UpdatePostInput input = + bindArgument(environment.getArgument("input"), UpdatePostInput.class); + final Urn postUrn = Urn.createFromString(input.getUrn()); + + final PostType type = input.getPostType(); + final UpdatePostContentInput content = input.getContent(); + final PostContentType contentType = content.getContentType(); + final String title = content.getTitle(); + final String link = content.getLink(); + final String description = content.getDescription(); + final UpdateMediaInput updateMediaInput = content.getMedia(); + final Authentication authentication = context.getAuthentication(); + + Media media = + updateMediaInput == null + ? 
null + : postService.mapMedia( + updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = + postService.mapPostContent(contentType.toString(), title, description, link, media); + + return CompletableFuture.supplyAsync( + () -> { + try { + return postService.updatePost(postUrn, type.toString(), postContent, authentication); + } catch (Exception e) { + throw new GraphQLException("Failed to update or edit post", e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 48f31fb75d371c..03e1d625c1e778 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -69,7 +69,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm .collect(Collectors.toList()), authentication, System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + return QueryMapper.map( + context, _queryService.getQueryEntityResponse(queryUrn, authentication)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to create a new Query from input %s", input), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index fec5bb120eebae..6fcc0fee763038 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -14,7 +14,6 @@ import 
com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -66,14 +65,16 @@ public CompletableFuture get(final DataFetchingEnvironment en // First, get all Query Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags( + flags -> flags.setFulltext(true).setSkipHighlighting(true)), QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true).setSkipHighlighting(true)); + count); final ListQueriesResult result = new ListQueriesResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index cc284aaf7b5637..5d485d24866fc5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -96,7 +96,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm : null, authentication, System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + return QueryMapper.map( + context, _queryService.getQueryEntityResponse(queryUrn, authentication)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to update Query from input %s", input), e); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index ca1e01b45989d2..c5c75d1e5c2c77 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -4,6 +4,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ContentParams; import com.linkedin.datahub.graphql.generated.EntityProfileParams; import com.linkedin.datahub.graphql.generated.FacetFilter; @@ -15,8 +16,8 @@ import com.linkedin.datahub.graphql.generated.RecommendationRenderType; import com.linkedin.datahub.graphql.generated.RecommendationRequestContext; import com.linkedin.datahub.graphql.generated.SearchParams; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.EntityRequestContext; import com.linkedin.metadata.recommendation.RecommendationsService; @@ -30,6 +31,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -46,6 +48,7 @@ public class ListRecommendationsResolver @WithSpan @Override public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final ListRecommendationsInput input = 
bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); @@ -55,13 +58,13 @@ public CompletableFuture get(DataFetchingEnvironment log.debug("Listing recommendations for input {}", input); List modules = _recommendationsService.listRecommendations( - Urn.createFromString(input.getUserUrn()), + context.getOperationContext(), mapRequestContext(input.getRequestContext()), input.getLimit()); return ListRecommendationsResult.builder() .setModules( modules.stream() - .map(this::mapRecommendationModule) + .map(m -> mapRecommendationModule(context, m)) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toList())) @@ -121,6 +124,7 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq } private Optional mapRecommendationModule( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationModule module) { RecommendationModule mappedModule = new RecommendationModule(); mappedModule.setTitle(module.getTitle()); @@ -134,17 +138,18 @@ private Optional mapRecommendationModule( } mappedModule.setContent( module.getContent().stream() - .map(this::mapRecommendationContent) + .map(c -> mapRecommendationContent(context, c)) .collect(Collectors.toList())); return Optional.of(mappedModule); } private RecommendationContent mapRecommendationContent( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationContent content) { RecommendationContent mappedContent = new RecommendationContent(); mappedContent.setValue(content.getValue()); if (content.hasEntity()) { - mappedContent.setEntity(UrnToEntityMapper.map(content.getEntity())); + mappedContent.setEntity(UrnToEntityMapper.map(context, content.getEntity())); } if (content.hasParams()) { mappedContent.setParams(mapRecommendationParams(content.getParams())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 61ecf09fc91a51..3bf11b9febc638 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -38,7 +38,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm () -> { try { return new InviteToken( - _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, true)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to create invite token for role %s", roleUrnStr), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 066753c4f7559f..039a1730e7e67a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -38,7 +38,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm () -> { try { return new InviteToken( - _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, false)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to get invite token for role %s", roleUrnStr), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index a1dd9219f6549c..42ec8ff28db7d5 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -11,7 +11,6 @@ import com.linkedin.datahub.graphql.types.role.mappers.DataHubRoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -24,6 +23,7 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -53,13 +53,12 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all role Urns. final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get and hydrate all users. 
final Map entities = @@ -76,7 +75,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setStart(gmsResult.getFrom()); result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); + result.setRoles(mapEntitiesToRoles(context, entities.values())); return result; } catch (Exception e) { throw new RuntimeException("Failed to list roles", e); @@ -84,9 +83,10 @@ public CompletableFuture get(final DataFetchingEnvironment envi }); } - private List mapEntitiesToRoles(final Collection entities) { + private static List mapEntitiesToRoles( + @Nullable QueryContext context, final Collection entities) { return entities.stream() - .map(DataHubRoleMapper::map) + .map(e -> DataHubRoleMapper.map(context, e)) .sorted(Comparator.comparing(DataHubRole::getName)) .collect(Collectors.toList()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 6d23456b76b4f4..5aa59b19cb5bb1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -15,6 +15,7 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -22,6 +23,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; @@ -36,6 +38,7 @@ public class AggregateAcrossEntitiesResolver private final EntityClient _entityClient; private final ViewService _viewService; + private final FormService _formService; @Override public CompletableFuture get(DataFetchingEnvironment environment) { @@ -58,16 +61,18 @@ public CompletableFuture get(DataFetchingEnvironment environme context.getAuthentication()) : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; try { return mapAggregateResults( + context, _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null ? SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -75,13 +80,11 @@ public CompletableFuture get(DataFetchingEnvironment environme sanitizedQuery, maybeResolvedView != null ? 
SearchUtils.combineFilters( - baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, 0, 0, // 0 entity count because we don't want resolved entities - searchFlags, null, - ResolverUtils.getAuthentication(environment), facets)); } catch (Exception e) { log.error( @@ -99,11 +102,12 @@ public CompletableFuture get(DataFetchingEnvironment environme }); } - AggregateResults mapAggregateResults(SearchResult searchResult) { + static AggregateResults mapAggregateResults( + @Nullable QueryContext context, SearchResult searchResult) { final AggregateResults results = new AggregateResults(); results.setFacets( searchResult.getMetadata().getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); return results; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index c3e843cefd5c84..f300331ab4bc8b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -10,9 +10,9 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleInput; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.service.ViewService; import 
com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -65,9 +66,18 @@ public CompletableFuture get(DataFetchingEnvironmen : null; List types = getEntityTypes(input.getTypes(), maybeResolvedView); + types = + types != null + ? types.stream() + .filter(AUTO_COMPLETE_ENTITY_TYPES::contains) + .collect(Collectors.toList()) + : null; if (types != null && types.size() > 0) { return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), + types.stream() + .map(_typeToEntity::get) + .filter(Objects::nonNull) + .collect(Collectors.toList()), sanitizedQuery, input, environment, @@ -76,7 +86,10 @@ public CompletableFuture get(DataFetchingEnvironmen // By default, autocomplete only against the Default Set of Autocomplete entities return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), + AUTO_COMPLETE_ENTITY_TYPES.stream() + .map(_typeToEntity::get) + .filter(Objects::nonNull) + .collect(Collectors.toList()), sanitizedQuery, input, environment, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index e54955e1857f09..ea6e329ba1a399 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -5,15 +5,14 @@ import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; import static 
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.search.AggregationMetadata; import com.linkedin.metadata.search.AggregationMetadataArray; @@ -23,6 +22,7 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Comparator; import java.util.List; @@ -30,6 +30,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -48,6 +49,7 @@ public class GetQuickFiltersResolver public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); @@ -58,12 +60,12 @@ public CompletableFuture get(final DataFetchingEnvironmen try { final SearchResult searchResult = - getSearchResults(ResolverUtils.getAuthentication(environment), input); + getSearchResults(context.getOperationContext(), input); final 
AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); + quickFilters.addAll(getPlatformQuickFilters(context, aggregations)); + quickFilters.addAll(getEntityTypeQuickFilters(context, aggregations)); } catch (Exception e) { log.error("Failed getting quick filters", e); throw new RuntimeException("Failed to to get quick filters", e); @@ -74,13 +76,17 @@ public CompletableFuture get(final DataFetchingEnvironmen }); } - /** Do a star search with view filter applied to get info about all data in this instance. */ + /** + * Do a star search with view filter applied to get info about all data in this instance. Include + * aggregations. + */ private SearchResult getSearchResults( - @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) + @Nonnull final OperationContext opContext, @Nonnull final GetQuickFiltersInput input) throws Exception { final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) + ? resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), opContext.getAuthentication()) : null; final List entityNames = SEARCHABLE_ENTITY_TYPES.stream() @@ -88,6 +94,7 @@ private SearchResult getSearchResults( .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( + opContext.withSearchFlags(flags -> flags.setSkipAggregates(false)), maybeResolvedView != null ? 
SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -99,8 +106,7 @@ private SearchResult getSearchResults( 0, 0, null, - null, - authentication); + null); } /** @@ -108,7 +114,7 @@ private SearchResult getSearchResults( * top 5 to quick filters */ private List getPlatformQuickFilters( - @Nonnull final AggregationMetadataArray aggregations) { + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List platforms = new ArrayList<>(); final Optional platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); @@ -120,7 +126,7 @@ private List getPlatformQuickFilters( sortedPlatforms.forEach( platformFilter -> { if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); + platforms.add(mapQuickFilter(context, PLATFORM, platformFilter)); } }); } @@ -136,7 +142,7 @@ private List getPlatformQuickFilters( * filters from a prioritized list. Do the same for datathub entity types. 
*/ private List getEntityTypeQuickFilters( - @Nonnull final AggregationMetadataArray aggregations) { + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List entityTypes = new ArrayList<>(); final Optional entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); @@ -144,6 +150,7 @@ private List getEntityTypeQuickFilters( if (entityAggregations.isPresent()) { final List sourceEntityTypeFilters = getQuickFiltersFromList( + context, SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); @@ -151,6 +158,7 @@ private List getEntityTypeQuickFilters( final List dataHubEntityTypeFilters = getQuickFiltersFromList( + context, SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); @@ -164,6 +172,7 @@ private List getEntityTypeQuickFilters( * until we reach the maxListSize defined */ private List getQuickFiltersFromList( + @Nullable QueryContext context, @Nonnull final List prioritizedList, final int maxListSize, @Nonnull final AggregationMetadata entityAggregations) { @@ -176,7 +185,7 @@ private List getQuickFiltersFromList( .filter(val -> val.getValue().equals(entityType)) .findFirst(); if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + entityTypes.add(mapQuickFilter(context, ENTITY_FILTER_NAME, entityFilter.get())); } } }); @@ -185,13 +194,15 @@ private List getQuickFiltersFromList( } private QuickFilter mapQuickFilter( - @Nonnull final String field, @Nonnull final FilterValue filterValue) { + @Nullable QueryContext context, + @Nonnull final String field, + @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); 
quickFilter.setValue(convertFilterValue(filterValue.getValue(), isEntityTypeFilter)); if (filterValue.getEntity() != null) { - final Entity entity = UrnToEntityMapper.map(filterValue.getEntity()); + final Entity entity = UrnToEntityMapper.map(context, filterValue.getEntity()); quickFilter.setEntity(entity); } return quickFilter; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index 742d1d170de64b..e4c224c4c84012 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -8,9 +8,9 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ScrollAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.ScrollResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnScrollResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; @@ -72,10 +72,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) : null; final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = 
SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); + } else { + searchFlags = null; } try { @@ -89,7 +91,11 @@ public CompletableFuture get(DataFetchingEnvironment environment) String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; return UrnScrollResultsMapper.map( + context, _entityClient.scrollAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags), maybeResolvedView != null ? SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -101,9 +107,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, scrollId, keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); + count)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index adab62c22bb724..f5ce7f82573554 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -11,10 +11,12 @@ import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageInput; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.common.mappers.LineageFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.datahub.graphql.types.mappers.UrnScrollAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.LineageFlags; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; @@ -73,10 +75,19 @@ public CompletableFuture get(DataFetchingEnvironment String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = - input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable - final Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); + Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); + + final LineageFlags lineageFlags = LineageFlagsInputMapper.map(context, input.getLineageFlags()); + if (lineageFlags.getStartTimeMillis() == null && startTimeMillis != null) { + lineageFlags.setStartTimeMillis(startTimeMillis); + } + + if (lineageFlags.getEndTimeMillis() == null && endTimeMillis != null) { + lineageFlags.setEndTimeMillis(endTimeMillis); + } + ; com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); @@ -93,7 +104,7 @@ public CompletableFuture get(DataFetchingEnvironment scrollId, count); - SearchFlags searchFlags = null; + final SearchFlags searchFlags; final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { @@ -102,9 +113,16 @@ public CompletableFuture get(DataFetchingEnvironment .setSkipCache(inputFlags.getSkipCache()) .setFulltext(inputFlags.getFulltext()) .setMaxAggValues(inputFlags.getMaxAggValues()); + } else { + searchFlags = null; } return UrnScrollAcrossLineageResultsMapper.map( + context, _entityClient.scrollAcrossLineage( + 
context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags) + .withLineageFlags(flags -> lineageFlags != null ? lineageFlags : flags), urn, resolvedDirection, entityNames, @@ -114,11 +132,7 @@ public CompletableFuture get(DataFetchingEnvironment null, scrollId, keepAlive, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); + count)); } catch (RemoteInvocationException e) { log.error( "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index f8178e3b396cb5..0d7f217bb02db0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -60,7 +60,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) @@ -76,7 +76,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) count); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null ? 
SearchUtils.intersectEntityTypes( entityNames, maybeResolvedView.getDefinition().getEntityTypes()) @@ -88,9 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) : baseFilter, start, count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); + sortCriterion)); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 0f5d2d90ba0c29..8df6c241f2965f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -2,47 +2,87 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static com.linkedin.metadata.Constants.QUERY_ENTITY_NAME; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageInput; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.common.mappers.LineageFlagsInputMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.LineageFlags; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.r2.RemoteInvocationException; +import graphql.VisibleForTesting; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; -import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; /** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j -@RequiredArgsConstructor public class SearchAcrossLineageResolver implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; + private static final Set TRANSIENT_ENTITIES = ImmutableSet.of(QUERY_ENTITY_NAME); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + + @VisibleForTesting final Set _allEntities; + private final List _allowedEntities; + + public SearchAcrossLineageResolver(EntityClient entityClient, EntityRegistry entityRegistry) { + this._entityClient = entityClient; + this._entityRegistry = entityRegistry; + this._allEntities = + entityRegistry.getEntitySpecs().values().stream() + .map(EntitySpec::getName) + .collect(Collectors.toSet()); + + this._allowedEntities = + this._allEntities.stream() + .filter(e -> 
!TRANSIENT_ENTITIES.contains(e)) + .collect(Collectors.toList()); + } + + private List getEntityNamesFromInput(List inputTypes) { + if (inputTypes != null && !inputTypes.isEmpty()) { + return inputTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } else { + return this._allowedEntities; + } + } + @Override public CompletableFuture get(DataFetchingEnvironment environment) throws URISyntaxException { log.debug("Entering search across lineage graphql resolver"); + final QueryContext context = environment.getContext(); + final SearchAcrossLineageInput input = bindArgument(environment.getArgument("input"), SearchAcrossLineageInput.class); @@ -50,12 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); - List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) - ? SEARCHABLE_ENTITY_TYPES - : input.getTypes(); - List entityNames = - entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List entityNames = getEntityNamesFromInput(input.getTypes()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = @@ -63,15 +98,28 @@ public CompletableFuture get(DataFetchingEnvironment final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = - input.getFilters() != null ? input.getFilters() : new ArrayList<>(); - final Integer maxHops = getMaxHops(filters); + final List filters = + input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); + final Integer maxHops = getMaxHops(facetFilters); @Nullable - final Long startTimeMillis = - input.getStartTimeMillis() == null ? 
null : input.getStartTimeMillis(); + Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable - final Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); + Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); + + final LineageFlags lineageFlags = LineageFlagsInputMapper.map(context, input.getLineageFlags()); + if (lineageFlags.getStartTimeMillis() == null && startTimeMillis != null) { + lineageFlags.setStartTimeMillis(startTimeMillis); + } + + if (lineageFlags.getEndTimeMillis() == null && endTimeMillis != null) { + lineageFlags.setEndTimeMillis(endTimeMillis); + } com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); @@ -88,20 +136,24 @@ public CompletableFuture get(DataFetchingEnvironment start, count); - final Filter filter = ResolverUtils.buildFilter(filters, input.getOrFilters()); - SearchFlags searchFlags = null; + final Filter filter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); if (inputFlags.getSkipHighlighting() == null) { searchFlags.setSkipHighlighting(true); } } else { searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); } - - return UrnSearchAcrossLineageResultsMapper.map( + LineageSearchResult salResults = _entityClient.searchAcrossLineage( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags) + .withLineageFlags(flags -> lineageFlags), urn, resolvedDirection, entityNames, @@ -110,11 +162,9 @@ public CompletableFuture get(DataFetchingEnvironment filter, null, start, - count, - 
startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); + count); + + return UrnSearchAcrossLineageResultsMapper.map(context, salResults); } catch (RemoteInvocationException e) { log.error( "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 68214238879237..ed9838b7074c71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,15 +1,20 @@ package com.linkedin.datahub.graphql.resolvers.search; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.GroupingCriterion; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; import com.linkedin.metadata.query.SearchFlags; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; @@ -28,7 +33,14 @@ public class SearchResolver implements DataFetcher get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final SearchInput input = bindArgument(environment.getArgument("input"), SearchInput.class); final String entityName = EntityTypeMapper.getName(input.getType()); // escape forward slash since it is a reserved character in Elasticsearch @@ -47,7 +60,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } else { searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } @@ -66,15 +79,15 @@ public CompletableFuture get(DataFetchingEnvironment environment) searchFlags); return UrnSearchResultsMapper.map( + context, _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> searchFlags), entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), null, start, - count, - ResolverUtils.getAuthentication(environment), - searchFlags)); + count)); } catch (Exception e) { log.error( "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index d04cb57e1a860e..c9d23fd0263a13 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -19,10 +19,11 @@ import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -88,10 +89,23 @@ private SearchUtils() {} EntityType.TAG, EntityType.CORP_USER, EntityType.CORP_GROUP, - EntityType.ROLE, EntityType.NOTEBOOK, EntityType.DATA_PRODUCT); + /** Entities that are part of browse by default */ + public static final List BROWSE_ENTITY_TYPES = + ImmutableList.of( + EntityType.DATASET, + EntityType.DASHBOARD, + EntityType.CHART, + EntityType.CONTAINER, + EntityType.MLMODEL, + EntityType.MLMODEL_GROUP, + EntityType.MLFEATURE_TABLE, + EntityType.DATA_FLOW, + EntityType.DATA_JOB, + EntityType.NOTEBOOK); + /** A prioritized list of source filter types used to generate quick filters */ public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( @@ -274,10 +288,11 @@ public static Integer getMaxHops(List filters) { } public static SearchFlags mapInputFlags( + @Nullable QueryContext context, com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } return searchFlags; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java new file mode 100644 index 00000000000000..7d204f2970158c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java @@ -0,0 +1,172 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.PropertyValueInput; +import com.linkedin.datahub.graphql.generated.UpsertStructuredPropertiesInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.StructuredPropertyUtils; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import graphql.com.google.common.collect.ImmutableSet; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.HashMap; +import java.util.List; +import 
java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class UpsertStructuredPropertiesResolver + implements DataFetcher< + CompletableFuture> { + + private final EntityClient _entityClient; + + public UpsertStructuredPropertiesResolver(@Nonnull final EntityClient entityClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final Authentication authentication = context.getAuthentication(); + + final UpsertStructuredPropertiesInput input = + bindArgument(environment.getArgument("input"), UpsertStructuredPropertiesInput.class); + final Urn assetUrn = UrnUtils.getUrn(input.getAssetUrn()); + Map> updateMap = new HashMap<>(); + // create a map of updates from our input + input + .getStructuredPropertyInputParams() + .forEach(param -> updateMap.put(param.getStructuredPropertyUrn(), param.getValues())); + + return CompletableFuture.supplyAsync( + () -> { + try { + // check authorization first + if (!AuthorizationUtils.canEditProperties(assetUrn, context)) { + throw new AuthorizationException( + String.format( + "Not authorized to update properties on the gives urn %s", assetUrn)); + } + + final AuditStamp auditStamp = + AuditStampUtils.createAuditStamp(authentication.getActor().toUrnStr()); + + if (!_entityClient.exists(assetUrn, authentication)) { + throw new RuntimeException( + String.format("Asset with provided urn %s does not exist", assetUrn)); + } + + // get or default the structured properties aspect + StructuredProperties structuredProperties = + getStructuredProperties(assetUrn, authentication); + + // update the existing properties based on new value + StructuredPropertyValueAssignmentArray properties = + 
updateExistingProperties(structuredProperties, updateMap, auditStamp); + + // append any new properties from our input + addNewProperties(properties, updateMap, auditStamp); + + structuredProperties.setProperties(properties); + + // ingest change proposal + final MetadataChangeProposal structuredPropertiesProposal = + AspectUtils.buildMetadataChangeProposal( + assetUrn, STRUCTURED_PROPERTIES_ASPECT_NAME, structuredProperties); + + _entityClient.ingestProposal(structuredPropertiesProposal, authentication, false); + + return StructuredPropertiesMapper.map(context, structuredProperties); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } + + private StructuredProperties getStructuredProperties(Urn assetUrn, Authentication authentication) + throws Exception { + EntityResponse response = + _entityClient.getV2( + assetUrn.getEntityType(), + assetUrn, + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME), + authentication); + StructuredProperties structuredProperties = new StructuredProperties(); + structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray()); + if (response != null && response.getAspects().containsKey(STRUCTURED_PROPERTIES_ASPECT_NAME)) { + structuredProperties = + new StructuredProperties( + response.getAspects().get(STRUCTURED_PROPERTIES_ASPECT_NAME).getValue().data()); + } + return structuredProperties; + } + + private StructuredPropertyValueAssignmentArray updateExistingProperties( + StructuredProperties structuredProperties, + Map> updateMap, + AuditStamp auditStamp) { + return new StructuredPropertyValueAssignmentArray( + structuredProperties.getProperties().stream() + .map( + propAssignment -> { + String propUrnString = propAssignment.getPropertyUrn().toString(); + if (updateMap.containsKey(propUrnString)) { + List valueList = updateMap.get(propUrnString); + PrimitivePropertyValueArray values = + new PrimitivePropertyValueArray( + 
valueList.stream() + .map(StructuredPropertyUtils::mapPropertyValueInput) + .collect(Collectors.toList())); + propAssignment.setValues(values); + propAssignment.setLastModified(auditStamp); + } + return propAssignment; + }) + .collect(Collectors.toList())); + } + + private void addNewProperties( + StructuredPropertyValueAssignmentArray properties, + Map> updateMap, + AuditStamp auditStamp) { + // first remove existing properties from updateMap so that we append only new properties + properties.forEach(prop -> updateMap.remove(prop.getPropertyUrn().toString())); + + updateMap.forEach( + (structuredPropUrn, values) -> { + StructuredPropertyValueAssignment valueAssignment = + new StructuredPropertyValueAssignment(); + valueAssignment.setPropertyUrn(UrnUtils.getUrn(structuredPropUrn)); + valueAssignment.setValues( + new PrimitivePropertyValueArray( + values.stream() + .map(StructuredPropertyUtils::mapPropertyValueInput) + .collect(Collectors.toList()))); + valueAssignment.setCreated(auditStamp); + valueAssignment.setLastModified(auditStamp); + properties.add(valueAssignment); + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java index 153c95c697a774..9e3ca0f2d45a65 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java @@ -2,17 +2,14 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; import static com.linkedin.metadata.Constants.*; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; @@ -72,15 +69,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists( - UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } OwnerUtils.addCreatorAsOwner( - context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + context, tagUrn, OwnerEntityType.CORP_USER, _entityService); return tagUrn; } catch (Exception e) { log.error( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index 7b9290b4532b58..5ec3968841b091 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; import static com.linkedin.metadata.Constants.*; @@ -10,7 +11,6 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -33,7 +33,7 @@ public class SetTagColorResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService + private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override @@ -53,7 +53,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { + if (!_entityService.exists(tagUrn, true)) { throw new IllegalArgumentException( String.format("Failed to set Tag %s color. 
Tag does not exist.", tagUrn)); } @@ -89,7 +89,7 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())))); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index f345d9ceb21e52..3f4a0367af05ad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -12,7 +12,6 @@ import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -57,13 +56,14 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all group Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), Constants.TEST_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Now that we have entities we can bind this to a result. 
final ListTestsResult result = new ListTestsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 922c28097f83c4..ae23e963cebb90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import com.datahub.authorization.AuthUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -14,14 +15,14 @@ import com.linkedin.test.TestDefinition; import com.linkedin.test.TestDefinitionType; import java.util.Map; -import java.util.Optional; import javax.annotation.Nonnull; public class TestUtils { /** Returns true if the authenticated user is able to manage tests. 
*/ public static boolean canManageTests(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } public static TestDefinition mapDefinition(final TestDefinitionInput testDefInput) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java new file mode 100644 index 00000000000000..cb0d24839056dc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java @@ -0,0 +1,25 @@ +package com.linkedin.datahub.graphql.resolvers.type; + +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import graphql.TypeResolutionEnvironment; +import graphql.schema.GraphQLObjectType; +import graphql.schema.TypeResolver; + +public class PropertyValueResolver implements TypeResolver { + + public static final String STRING_VALUE = "StringValue"; + public static final String NUMBER_VALUE = "NumberValue"; + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringValue) { + return env.getSchema().getObjectType(STRING_VALUE); + } else if (env.getObject() instanceof NumberValue) { + return env.getSchema().getObjectType(NUMBER_VALUE); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 215d53299c8ac1..effde5127b7f68 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.types.corpuser.mappers.CorpUserMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -25,6 +24,7 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class ListUsersResolver implements DataFetcher> { @@ -57,13 +57,14 @@ public CompletableFuture get(final DataFetchingEnvironment envi // First, get all policy Urns. final SearchResult gmsResult = _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); // Then, get hydrate all users. final Map entities = @@ -81,7 +82,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setStart(gmsResult.getFrom()); result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); - result.setUsers(mapEntities(entities.values())); + result.setUsers(mapEntities(context, entities.values())); return result; } catch (Exception e) { throw new RuntimeException("Failed to list users", e); @@ -92,7 +93,8 @@ public CompletableFuture get(final DataFetchingEnvironment envi "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private List mapEntities(final Collection entities) { - return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList()); + private static List mapEntities( + @Nullable QueryContext context, final Collection entities) { + return entities.stream().map(e -> CorpUserMapper.map(context, e)).collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index caa37f82648544..80d33b84b4c763 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -15,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -68,14 +67,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DATAHUB_VIEW_ENTITY_NAME, query, buildFilters(), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListViewsResult result = new ListViewsResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java index 
945d2d50bcc3e1..fd029f9d6d3b2c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -71,14 +70,13 @@ public CompletableFuture get(final DataFetchingEnvironment envi final SearchResult gmsResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.DATAHUB_VIEW_ENTITY_NAME, query, buildFilters(viewType, context.getActorUrn()), DEFAULT_SORT_CRITERION, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + count); final ListViewsResult result = new ListViewsResult(); result.setStart(gmsResult.getFrom()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java index 5a52a57d9c374d..4f209ad9472aba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java @@ -16,6 +16,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** Resolver responsible for updating a particular DataHub View */ @@ -49,7 +50,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm 
context.getAuthentication(), System.currentTimeMillis()); log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); + return getView(context, urn, context.getAuthentication()); } throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); @@ -63,7 +64,9 @@ public CompletableFuture get(final DataFetchingEnvironment environm } private DataHubView getView( - @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + @Nullable QueryContext context, + @Nonnull final Urn urn, + @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { @@ -71,6 +74,6 @@ private DataHubView getView( String.format( "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); } - return DataHubViewMapper.map(maybeResponse); + return DataHubViewMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index 9da5f915ff31d6..e6078a22835f4b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -11,8 +11,8 @@ import com.linkedin.datahub.graphql.generated.DataHubViewFilterInput; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.CriterionArray; @@ -80,9 +80,7 @@ public static boolean canUpdateView( } // If the View is Personal, then the current actor must be the owner. - return isViewOwner( - viewInfo.getCreated().getActor(), - UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getActorUrn())); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java index 00e9badf5e3456..6e4259dde18c34 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java @@ -1,23 +1,31 @@ package com.linkedin.datahub.graphql.types.aspect; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaMetadataMapper; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AspectMapper { public static final AspectMapper INSTANCE = new AspectMapper(); - public static Aspect map(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(aspect, entityUrn); + public static Aspect map( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); } - public Aspect apply(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + public Aspect apply( + @Nullable 
QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { if (Constants.SCHEMA_METADATA_ASPECT_NAME.equals(aspect.getName())) { - return SchemaMetadataMapper.map(aspect, entityUrn); + return SchemaMetadataMapper.map(context, aspect, entityUrn); } return null; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java index 45e80822b12c8c..9542b4600cd2be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.aspect; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -46,6 +48,7 @@ public List> batchLoad( @Nonnull List keys, @Nonnull QueryContext context) throws Exception { try { + return keys.stream() .map( key -> { @@ -53,11 +56,13 @@ public List> batchLoad( Urn entityUrn = Urn.createFromString(key.getUrn()); Map response = - _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication()); + canView(context.getOperationContext(), entityUrn) + ? 
_entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()) + : Map.of(); EntityResponse entityResponse = response.get(entityUrn); @@ -69,7 +74,7 @@ public List> batchLoad( final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); return DataFetcherResult.newResult() - .data(AspectMapper.map(aspect, entityUrn)) + .data(AspectMapper.map(context, aspect, entityUrn)) .build(); } catch (Exception e) { if (e instanceof RestLiResponseException) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index 2536f4d2521ee0..c6f80f1d2cf47b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -2,8 +2,10 @@ import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.AssertionStdAggregation; import com.linkedin.datahub.graphql.generated.AssertionStdOperator; @@ -24,10 +26,11 @@ import com.linkedin.metadata.Constants; import java.util.Collections; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class AssertionMapper { - public static Assertion map(final EntityResponse entityResponse) { + public static Assertion map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Assertion result = new Assertion(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = 
entityResponse.getAspects(); @@ -38,7 +41,8 @@ public static Assertion map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { - result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); + result.setInfo( + mapAssertionInfo(context, new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); @@ -46,30 +50,44 @@ public static Assertion map(final EntityResponse entityResponse) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } + final EnvelopedAspect envelopedStatus = aspects.get(Constants.STATUS_ASPECT_NAME); + if (envelopedStatus != null) { + result.setStatus(mapStatus(new Status(envelopedStatus.getValue().data()))); + } + + return result; + } + + private static com.linkedin.datahub.graphql.generated.Status mapStatus(Status status) { + final com.linkedin.datahub.graphql.generated.Status result = + new com.linkedin.datahub.graphql.generated.Status(); + result.setRemoved(status.isRemoved()); return result; } private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertionInfo( - final AssertionInfo gmsAssertionInfo) { + @Nullable QueryContext context, final AssertionInfo gmsAssertionInfo) { final com.linkedin.datahub.graphql.generated.AssertionInfo assertionInfo = new com.linkedin.datahub.graphql.generated.AssertionInfo(); 
assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { DatasetAssertionInfo datasetAssertion = - mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + mapDatasetAssertionInfo(context, gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } + assertionInfo.setDescription(gmsAssertionInfo.getDescription()); return assertionInfo; } private static DatasetAssertionInfo mapDatasetAssertionInfo( + @Nullable QueryContext context, final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); @@ -102,7 +120,7 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( } if (gmsDatasetAssertion.hasNativeParameters()) { datasetAssertion.setNativeParameters( - StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + StringMapMapper.map(context, gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index ac5cce1191e5dd..9b411033c10904 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -62,7 +62,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : assertionUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -72,7 +72,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(AssertionMapper.map(gmsResult)) + .data(AssertionMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index bfe2ccbe34166d..eeb4b91f3a1c53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -60,7 +60,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(keys.size()); for (Urn urn : tokenInfoUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -70,7 +70,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) + .data(AccessTokenMetadataMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index a519a65e5cb6b8..9c807bf0304add 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,6 +2,7 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -10,17 +11,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AccessTokenMetadataMapper implements ModelMapper { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); - public static AccessTokenMetadata map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static AccessTokenMetadata map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { + public AccessTokenMetadata apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse input) { final AccessTokenMetadata metadata = new AccessTokenMetadata(); 
metadata.setUrn(input.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index ba8e96159b0bf5..d5f976fa8f9b65 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -39,7 +39,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -81,7 +80,9 @@ public class ChartType EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME); + SUB_TYPES_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); @@ -116,6 +117,7 @@ public List> batchLoad( @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + final Map chartMap = _entityClient.batchGetV2( CHART_ENTITY_NAME, @@ -123,7 +125,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(chartMap.getOrDefault(urn, null)); } @@ -133,7 +135,7 @@ public List> batchLoad( gmsChart == null ? 
null : DataFetcherResult.newResult() - .data(ChartMapper.map(gmsChart)) + .data(ChartMapper.map(context, gmsChart)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -152,14 +154,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "chart", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -171,8 +172,8 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete(context.getOperationContext(), "chart", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -188,8 +189,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "chart", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "chart", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -197,7 +203,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } private ChartUrn getChartUrn(String urnStr) { @@ -214,9 +220,9 @@ public Chart update( @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = ChartUpdateInputMapper.map(input, actor); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + ChartUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -237,7 +243,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CHART_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 
0ef52c9f457168..561c3b9bec1e03 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.chart.EditableChartProperties; @@ -7,6 +8,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InputFields; @@ -16,6 +18,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.ChartEditableProperties; @@ -41,8 +45,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -50,19 +56,23 @@ import com.linkedin.metadata.key.ChartKey; 
import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ChartMapper implements ModelMapper { public static final ChartMapper INSTANCE = new ChartMapper(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Chart map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { + public Chart apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Chart result = new Chart(); Urn entityUrn = entityResponse.getUrn(); @@ -75,53 +85,74 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); mappingHelper.mapToResult( - CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + CHART_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapChartInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); mappingHelper.mapToResult( EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> - chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + chart.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (chart, dataMap) -> chart.setStatus(StatusMapper.map(new Status(dataMap)))); + (chart, dataMap) -> chart.setStatus(StatusMapper.map(context, new Status(dataMap)))); 
mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> this.mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> chart.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> - chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + chart.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, ChartMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, ChartMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, - (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + (chart, dataMap) -> + chart.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> - chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); + chart.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); mappingHelper.mapToResult( - EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + 
EMBED_ASPECT_NAME, + (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> - chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + chart.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((chart, dataMap) -> + chart.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Chart.class); + } else { + return mappingHelper.getResult(); + } } private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { @@ -140,14 +171,20 @@ private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { } private void mapChartInfo( - @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + chart.setInfo(mapInfo(context, gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(context, gmsChartInfo, entityUrn)); } /** Maps GMS {@link 
com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + private ChartInfo mapInfo( + @Nonnull QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { final ChartInfo result = new ChartInfo(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -171,10 +208,10 @@ private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn if (info.hasType()) { result.setType(ChartType.valueOf(info.getType().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } if (info.hasExternalUrl()) { result.setExternalUrl(info.getExternalUrl().toString()); @@ -189,8 +226,10 @@ private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn } /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ - private ChartProperties mapChartInfoToProperties( - final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + private static ChartProperties mapChartInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { final ChartProperties result = new ChartProperties(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -202,10 +241,10 @@ private ChartProperties mapChartInfoToProperties( if (info.hasType()) { 
result.setType(ChartType.valueOf(info.getType().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } if (info.hasExternalUrl()) { result.setExternalUrl(info.getExternalUrl().toString()); @@ -238,15 +277,19 @@ private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap d chart.setEditableProperties(chartEditableProperties); } - private void mapGlobalTags( - @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); chart.setGlobalTags(globalTags); chart.setTags(globalTags); } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Chart chart, @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); chart.setContainer( @@ -256,8 +299,9 @@ private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { .build()); } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Chart 
chart, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + chart.setDomain(DomainAssociationMapper.map(context, domains, chart.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index f2a434b58686c5..806e537c6ec261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChartUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,6 +19,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ChartUpdateInputMapper implements InputModelMapper, Urn> { @@ -25,13 +27,17 @@ public class ChartUpdateInputMapper public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); public static Collection map( - @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, chartUpdateInput, actor); } @Override public Collection apply( - @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) 
{ + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -41,7 +47,7 @@ public Collection apply( if (chartUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, chartUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,7 +57,7 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( chartUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } // Tags overrides global tags if provided @@ -59,7 +65,7 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( chartUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index 4da18403f95cca..49c2d17ce09585 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -2,22 +2,28 @@ import com.linkedin.common.InputFields; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.InputField; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaFieldMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InputFieldsMapper { public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); public static com.linkedin.datahub.graphql.generated.InputFields map( - @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable final QueryContext context, + @Nonnull final InputFields metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public com.linkedin.datahub.graphql.generated.InputFields apply( - @Nonnull final InputFields input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final InputFields input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); result.setFields( @@ -28,7 +34,7 @@ public com.linkedin.datahub.graphql.generated.InputFields apply( if (field.hasSchemaField()) { fieldResult.setSchemaField( - SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); + SchemaFieldMapper.map(context, field.getSchemaField(), entityUrn)); } if (field.hasSchemaFieldUrn()) { fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index 1f952bb6a2bd1f..851569a6cc5827 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,8 +1,10 @@ package 
com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -13,12 +15,14 @@ public class AuditStampMapper implements ModelMapper { public static final BrowsePathsV2Mapper INSTANCE = new BrowsePathsV2Mapper(); - public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) { - return INSTANCE.apply(metadata); + public static BrowsePathV2 map( + @Nullable QueryContext context, @Nonnull final BrowsePathsV2 metadata) { + return INSTANCE.apply(context, metadata); } @Override - public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { + public BrowsePathV2 apply(@Nullable QueryContext context, @Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); final List path = - input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + input.getPath().stream() + .map(p -> mapBrowsePathEntry(context, p)) + .collect(Collectors.toList()); result.setPath(path); return result; } - private BrowsePathEntry mapBrowsePathEntry(com.linkedin.common.BrowsePathEntry pathEntry) { + private BrowsePathEntry mapBrowsePathEntry( + @Nullable QueryContext context, com.linkedin.common.BrowsePathEntry pathEntry) { final BrowsePathEntry entry = new BrowsePathEntry(); entry.setName(pathEntry.getId()); if (pathEntry.hasUrn() && pathEntry.getUrn() != null) { - entry.setEntity(UrnToEntityMapper.map(pathEntry.getUrn())); + entry.setEntity(UrnToEntityMapper.map(context, pathEntry.getUrn())); } return entry; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index e3a09bc8926a30..14fd1c82d5df7a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; public class ChangeAuditStampsMapper implements ModelMapper { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); - public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { - return INSTANCE.apply(input); + public static ChangeAuditStamps map( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { + return INSTANCE.apply(context, input); } @Override - public ChangeAuditStamps apply(com.linkedin.common.ChangeAuditStamps input) { + public ChangeAuditStamps apply( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps(); - changeAuditStamps.setCreated(AuditStampMapper.map(input.getCreated())); - changeAuditStamps.setLastModified(AuditStampMapper.map(input.getLastModified())); + changeAuditStamps.setCreated(AuditStampMapper.map(context, input.getCreated())); + changeAuditStamps.setLastModified(AuditStampMapper.map(context, input.getLastModified())); if (input.hasDeleted()) { - changeAuditStamps.setDeleted(AuditStampMapper.map(input.getDeleted())); + changeAuditStamps.setDeleted(AuditStampMapper.map(context, input.getDeleted())); } return changeAuditStamps; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 806e8e6aadc5b1..bb35a6da984189 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,24 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.NonNull; public class CostMapper implements ModelMapper { public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); + public static Cost map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(context, cost); } @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + public Cost apply(@Nullable QueryContext context, @Nonnull final com.linkedin.common.Cost cost) { final Cost result = new Cost(); result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); + result.setCostValue(CostValueMapper.map(context, cost.getCost())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 56c107f7ec0596..c71c2274362b8b 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CostValueMapper implements ModelMapper { public static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); + public static CostValue map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(context, costValue); } @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + public CostValue apply( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { final CostValue result = new CostValue(); if (costValue.isCostCode()) { result.setCostCode(costValue.getCostCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index a2236f7e8586d5..4345819867617b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformInstanceAspectMapper implements ModelMapper { @@ -12,12 +14,15 @@ public class DataPlatformInstanceAspectMapper new DataPlatformInstanceAspectMapper(); public static DataPlatformInstance map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { - return INSTANCE.apply(dataPlatformInstance); + return INSTANCE.apply(context, dataPlatformInstance); } @Override - public DataPlatformInstance apply(@Nonnull final com.linkedin.common.DataPlatformInstance input) { + public DataPlatformInstance apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.DataPlatformInstance input) { final DataPlatformInstance result = new DataPlatformInstance(); if (input.hasInstance()) { result.setType(EntityType.DATA_PLATFORM_INSTANCE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 7a884741669159..8c3d72edfed25a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DeprecationMapper implements ModelMapper { public static final DeprecationMapper 
INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); + public static Deprecation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(context, deprecation); } @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + public Deprecation apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation input) { final Deprecation result = new Deprecation(); result.setActor(input.getActor().toString()); result.setDeprecated(input.isDeprecated()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 339c6a848d9f3d..51801c43061e8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EmbedMapper implements ModelMapper { public static final EmbedMapper INSTANCE = new EmbedMapper(); - public static Embed map(@Nonnull final com.linkedin.common.Embed metadata) { - return INSTANCE.apply(metadata); + public static Embed map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Embed metadata) { + return INSTANCE.apply(context, metadata); } @Override - public Embed apply(@Nonnull final com.linkedin.common.Embed input) { + public Embed apply( + @Nullable QueryContext 
context, @Nonnull final com.linkedin.common.Embed input) { final Embed result = new Embed(); result.setRenderUrl(input.getRenderUrl()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 830cbb0e79d797..c1483f8d6d9638 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -45,6 +45,12 @@ public List apply( .map(FineGrainedLineagesMapper::mapDatasetSchemaField) .collect(Collectors.toList())); } + if (fineGrainedLineage.hasQuery()) { + resultEntry.setQuery(fineGrainedLineage.getQuery().toString()); + } + if (fineGrainedLineage.hasTransformOperation()) { + resultEntry.setTransformOperation(fineGrainedLineage.getTransformOperation()); + } result.add(resultEntry); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java new file mode 100644 index 00000000000000..cf0603d6d49739 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java @@ -0,0 +1,32 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.GroupingCriterion; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + 
+public class GroupingCriterionInputMapper + implements ModelMapper { + + public static final GroupingCriterionInputMapper INSTANCE = new GroupingCriterionInputMapper(); + + public static com.linkedin.metadata.query.GroupingCriterion map( + @Nullable QueryContext context, @Nonnull final GroupingCriterion groupingCriterion) { + return INSTANCE.apply(context, groupingCriterion); + } + + @Override + public com.linkedin.metadata.query.GroupingCriterion apply( + @Nullable QueryContext context, GroupingCriterion input) { + return new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType( + input.getBaseEntityType() != null + ? EntityTypeMapper.getName(input.getBaseEntityType()) + : null, + SetMode.REMOVE_OPTIONAL_IF_NULL) + .setGroupingEntityType(EntityTypeMapper.getName(input.getGroupingEntityType())); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 4546e0e4d8dc00..c57e7fd30da988 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -1,25 +1,31 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMapper { public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); public static InstitutionalMemory map( - @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, 
entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory memory, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, memory, entityUrn); } public InstitutionalMemory apply( - @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory input, + @Nonnull final Urn entityUrn) { final InstitutionalMemory result = new InstitutionalMemory(); result.setElements( input.getElements().stream() - .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)) + .map(metadata -> InstitutionalMemoryMetadataMapper.map(context, metadata, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index 49a46185070865..7c6de02ecc8767 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMetadataMapper { @@ -11,12 +13,14 @@ public class InstitutionalMemoryMetadataMapper { new InstitutionalMemoryMetadataMapper(); public static InstitutionalMemoryMetadata map( + @Nullable QueryContext context, @Nonnull final 
com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + return INSTANCE.apply(context, metadata, entityUrn); } public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); @@ -24,7 +28,7 @@ public InstitutionalMemoryMetadata apply( result.setDescription(input.getDescription()); // deprecated field result.setLabel(input.getDescription()); result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); + result.setCreated(AuditStampMapper.map(context, input.getCreateStamp())); result.setAssociatedUrn(entityUrn.toString()); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 87d865471708e8..0219f91e60e6d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -3,10 +3,12 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class 
InstitutionalMemoryMetadataUpdateMapper implements ModelMapper { @@ -15,12 +17,13 @@ public class InstitutionalMemoryMetadataUpdateMapper new InstitutionalMemoryMetadataUpdateMapper(); public static InstitutionalMemoryMetadata map( - @Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { + return INSTANCE.apply(context, input); } @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { + public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); metadata.setDescription(input.getDescription()); metadata.setUrl(new Url(input.getUrl())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index d8b451458e72c7..d8bdd354d4ad52 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -2,10 +2,12 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryUpdateMapper implements ModelMapper { @@ -13,17 +15,19 @@ public class InstitutionalMemoryUpdateMapper private 
static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); + public static InstitutionalMemory map( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(context, input); } @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { + public InstitutionalMemory apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); institutionalMemory.setElements( new InstitutionalMemoryMetadataArray( input.getElements().stream() - .map(InstitutionalMemoryMetadataUpdateMapper::map) + .map(e -> InstitutionalMemoryMetadataUpdateMapper.map(context, e)) .collect(Collectors.toList()))); return institutionalMemory; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java new file mode 100644 index 00000000000000..43c24c9630d646 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java @@ -0,0 +1,71 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.UrnArray; +import com.linkedin.common.UrnArrayMap; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityTypeToPlatforms; +import com.linkedin.datahub.graphql.generated.LineageFlags; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.Collections; +import java.util.List; +import java.util.Optional; 
+import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * Maps GraphQL SearchFlags to Pegasus + * + *

To be replaced by auto-generated mappers implementations + */ +public class LineageFlagsInputMapper + implements ModelMapper { + + public static final LineageFlagsInputMapper INSTANCE = new LineageFlagsInputMapper(); + + @Nonnull + public static com.linkedin.metadata.query.LineageFlags map( + QueryContext queryContext, @Nonnull final LineageFlags lineageFlags) { + return INSTANCE.apply(queryContext, lineageFlags); + } + + @Override + public com.linkedin.metadata.query.LineageFlags apply( + QueryContext context, @Nullable final LineageFlags lineageFlags) { + com.linkedin.metadata.query.LineageFlags result = + new com.linkedin.metadata.query.LineageFlags(); + if (lineageFlags == null) { + return result; + } + if (lineageFlags.getIgnoreAsHops() != null) { + result.setIgnoreAsHops(mapIgnoreAsHops(lineageFlags.getIgnoreAsHops())); + } + if (lineageFlags.getEndTimeMillis() != null) { + result.setEndTimeMillis(lineageFlags.getEndTimeMillis()); + } + if (lineageFlags.getStartTimeMillis() != null) { + result.setStartTimeMillis(lineageFlags.getStartTimeMillis()); + } + if (lineageFlags.getEntitiesExploredPerHopLimit() != null) { + result.setEntitiesExploredPerHopLimit(lineageFlags.getEntitiesExploredPerHopLimit()); + } + return result; + } + + private static UrnArrayMap mapIgnoreAsHops(List ignoreAsHops) { + UrnArrayMap result = new UrnArrayMap(); + ignoreAsHops.forEach( + ignoreAsHop -> + result.put( + EntityTypeMapper.getName(ignoreAsHop.getEntityType()), + new UrnArray( + Optional.ofNullable(ignoreAsHop.getPlatforms()) + .orElse(Collections.emptyList()) + .stream() + .map(UrnUtils::getUrn) + .collect(Collectors.toList())))); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 37b625715edd5c..5ed6aa609946f2 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -3,6 +3,7 @@ import com.linkedin.common.Operation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; @@ -10,6 +11,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OperationMapper implements TimeSeriesAspectMapper { @@ -17,13 +19,13 @@ public class OperationMapper public static final OperationMapper INSTANCE = new OperationMapper(); public static com.linkedin.datahub.graphql.generated.Operation map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.Operation apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { Operation gmsProfile = GenericRecordUtils.deserializeAspect( @@ -49,7 +51,7 @@ public com.linkedin.datahub.graphql.generated.Operation apply( result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); } if (gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); + result.setCustomProperties(StringMapMapper.map(context, gmsProfile.getCustomProperties())); } if (gmsProfile.hasNumAffectedRows()) { 
result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index ea15aefdad3b79..8b4f9a1f4ca506 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -4,6 +4,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -11,6 +12,7 @@ import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -22,11 +24,16 @@ public class OwnerMapper { public static final OwnerMapper INSTANCE = new OwnerMapper(); public static Owner map( - @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, owner, entityUrn); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + public Owner apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { final Owner result = new Owner(); // Deprecated result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); @@ -52,7 +59,7 @@ public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull fina result.setOwner(partialOwner); } if (owner.hasSource()) { - result.setSource(OwnershipSourceMapper.map(owner.getSource())); + result.setSource(OwnershipSourceMapper.map(context, owner.getSource())); } result.setAssociatedUrn(entityUrn.toString()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index a38c16d02f1215..5cf680d88281f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -6,23 +6,25 @@ import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnerUpdate; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; import 
com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.net.URISyntaxException; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OwnerUpdateMapper implements ModelMapper { private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + public static Owner map(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { + return INSTANCE.apply(context, input); } @Override - public Owner apply(@Nonnull final OwnerUpdate input) { + public Owner apply(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { final Owner owner = new Owner(); try { if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 31f637a047798e..a3a28717c9eb81 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Ownership; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -15,17 +17,21 @@ public class OwnershipMapper { public static final OwnershipMapper INSTANCE = new OwnershipMapper(); public static Ownership map( - @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, ownership, entityUrn); } public Ownership apply( - @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); + result.setLastModified(AuditStampMapper.map(context, ownership.getLastModified())); result.setOwners( ownership.getOwners().stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) + .map(owner -> OwnerMapper.map(context, owner, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index 75eaffb850a8b2..12a38d9caa284e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipSource; import com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; 
+import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -16,12 +18,15 @@ public class OwnershipSourceMapper public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); public static OwnershipSource map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); + return INSTANCE.apply(context, ownershipSource); } @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + public OwnershipSource apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { final OwnershipSource result = new OwnershipSource(); result.setUrl(ownershipSource.getUrl()); result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 97afbc7ddf8556..6ceccff8a9e76e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -5,26 +5,36 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper { private static final OwnershipUpdateMapper INSTANCE = new 
OwnershipUpdateMapper(); - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); + public static Ownership map( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { + return INSTANCE.apply(context, input, actor); } @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + public Ownership apply( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { final Ownership ownership = new Ownership(); ownership.setOwners( new OwnerArray( - input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList()))); + input.getOwners().stream() + .map(o -> OwnerUpdateMapper.map(context, o)) + .collect(Collectors.toList()))); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index e2d29d02974491..e6b75f9482f59f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,8 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps GraphQL SearchFlags to Pegasus @@ -15,12 
+20,13 @@ public class SearchFlagsInputMapper public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); public static com.linkedin.metadata.query.SearchFlags map( - @Nonnull final SearchFlags searchFlags) { - return INSTANCE.apply(searchFlags); + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { + return INSTANCE.apply(context, searchFlags); } @Override - public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags searchFlags) { + public com.linkedin.metadata.query.SearchFlags apply( + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { com.linkedin.metadata.query.SearchFlags result = new com.linkedin.metadata.query.SearchFlags(); if (searchFlags.getFulltext() != null) { result.setFulltext(searchFlags.getFulltext()); @@ -42,6 +48,22 @@ public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags if (searchFlags.getGetSuggestions() != null) { result.setGetSuggestions(searchFlags.getGetSuggestions()); } + if (searchFlags.getIncludeSoftDeleted() != null) { + result.setIncludeSoftDeleted(searchFlags.getIncludeSoftDeleted()); + } + if (searchFlags.getIncludeRestricted() != null) { + result.setIncludeRestricted(searchFlags.getIncludeRestricted()); + } + if (searchFlags.getGroupingSpec() != null + && searchFlags.getGroupingSpec().getGroupingCriteria() != null) { + result.setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + searchFlags.getGroupingSpec().getGroupingCriteria().stream() + .map(c -> GroupingCriterionInputMapper.map(context, c)) + .collect(Collectors.toList())))); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 0758daf5df2e77..eea4bcd4a28d2f 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SiblingProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,16 +19,21 @@ public class SiblingsMapper public static final SiblingsMapper INSTANCE = new SiblingsMapper(); - public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings siblings) { - return INSTANCE.apply(siblings); + public static SiblingProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { + return INSTANCE.apply(context, siblings); } @Override - public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { + public SiblingProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); result.setSiblings( - siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + siblings.getSiblings().stream() + .filter(s -> context == null || canView(context.getOperationContext(), s)) + .map(s -> UrnToEntityMapper.map(context, s)) + .collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 2d1efdffc496c9..f4f829a046f2ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class StatusMapper implements ModelMapper { public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); + public static Status map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(context, metadata); } @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { + public Status apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status input) { final Status result = new Status(); result.setRemoved(input.isRemoved()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 0e8d6822b7d091..4175fdb2028653 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.StringMapEntry; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -16,12 +18,14 @@ public class StringMapMapper implements ModelMapper, List map(@Nonnull final Map input) { - return INSTANCE.apply(input); + public static List map( + @Nullable QueryContext context, @Nonnull final Map input) { + return INSTANCE.apply(context, input); } @Override - public List apply(@Nonnull final Map input) { + public List apply( + @Nullable QueryContext context, @Nonnull final Map input) { List results = new ArrayList<>(); for (String key : input.keySet()) { final StringMapEntry entry = new StringMapEntry(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java index 55294e4b46822c..924ee92d2f00f5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SubTypesMapper implements ModelMapper { @@ -11,12 +13,13 @@ public class SubTypesMapper public static final SubTypesMapper INSTANCE = new SubTypesMapper(); public static com.linkedin.datahub.graphql.generated.SubTypes map( - @Nonnull final SubTypes metadata) { - return 
INSTANCE.apply(metadata); + @Nullable QueryContext context, @Nonnull final SubTypes metadata) { + return INSTANCE.apply(context, metadata); } @Override - public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { + public com.linkedin.datahub.graphql.generated.SubTypes apply( + @Nullable QueryContext context, @Nonnull final SubTypes input) { final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); result.setTypeNames(new ArrayList<>(input.getTypeNames())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 4c452af1262012..00f2a0df7512c3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; @@ -19,6 +20,7 @@ import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; @@ -30,22 +32,27 @@ import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.Notebook; import 
com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; +import com.linkedin.datahub.graphql.generated.QueryEntity; +import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UrnToEntityMapper implements ModelMapper { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); - public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) { - return INSTANCE.apply(urn); + public static Entity map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.urn.Urn urn) { + return INSTANCE.apply(context, urn); } @Override - public Entity apply(Urn input) { + public Entity apply(@Nullable QueryContext context, Urn input) { Entity partialEntity = null; if (input.getEntityType().equals("dataset")) { partialEntity = new Dataset(); @@ -152,6 +159,11 @@ public Entity apply(Urn input) { ((Domain) partialEntity).setUrn(input.toString()); ((Domain) partialEntity).setType(EntityType.DOMAIN); } + if (input.getEntityType().equals("erModelRelationship")) { + partialEntity = new ERModelRelationship(); + ((ERModelRelationship) partialEntity).setUrn(input.toString()); + ((ERModelRelationship) partialEntity).setType(EntityType.ER_MODEL_RELATIONSHIP); + } if (input.getEntityType().equals("assertion")) { partialEntity = new Assertion(); ((Assertion) partialEntity).setUrn(input.toString()); @@ -192,6 +204,21 @@ public Entity apply(Urn input) { ((OwnershipTypeEntity) partialEntity).setUrn(input.toString()); ((OwnershipTypeEntity) partialEntity).setType(EntityType.CUSTOM_OWNERSHIP_TYPE); } + if 
(input.getEntityType().equals(STRUCTURED_PROPERTY_ENTITY_NAME)) { + partialEntity = new StructuredPropertyEntity(); + ((StructuredPropertyEntity) partialEntity).setUrn(input.toString()); + ((StructuredPropertyEntity) partialEntity).setType(EntityType.STRUCTURED_PROPERTY); + } + if (input.getEntityType().equals(QUERY_ENTITY_NAME)) { + partialEntity = new QueryEntity(); + ((QueryEntity) partialEntity).setUrn(input.toString()); + ((QueryEntity) partialEntity).setType(EntityType.QUERY); + } + if (input.getEntityType().equals(RESTRICTED_ENTITY_NAME)) { + partialEntity = new Restricted(); + ((Restricted) partialEntity).setUrn(input.toString()); + ((Restricted) partialEntity).setType(EntityType.RESTRICTED); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 0b156f11e8834b..d9eab8e1ce9492 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspectMap; import java.util.function.BiConsumer; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.AllArgsConstructor; import lombok.Getter; +import org.apache.commons.lang3.function.TriConsumer; @AllArgsConstructor public class MappingHelper { @@ -18,4 +21,14 @@ public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { + if (_aspectMap.containsKey(aspectName)) { + DataMap dataMap = _aspectMap.get(aspectName).getValue().data(); + consumer.accept(context, result, dataMap); + } + } } diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 1200493666a592..d1cd9528322cff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,7 +18,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -51,7 +50,9 @@ public class ContainerType Constants.CONTAINER_ASPECT_NAME, Constants.DOMAINS_ASPECT_NAME, Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME); + Constants.DATA_PRODUCTS_ASPECT_NAME, + Constants.STRUCTURED_PROPERTIES_ASPECT_NAME, + Constants.FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; @@ -89,7 +90,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -99,7 +100,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(ContainerMapper.map(gmsResult)) + .data(ContainerMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -126,14 +127,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -145,7 +145,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index 07594c53c68312..2c0dc142bee3d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -4,6 +4,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -14,6 +15,7 @@ import 
com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; @@ -26,19 +28,23 @@ import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nullable; public class ContainerMapper { @Nullable - public static Container map(final EntityResponse entityResponse) { + public static Container map( + @Nullable final QueryContext context, final EntityResponse entityResponse) { final Container result = new Container(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -54,7 +60,7 @@ public static Container map(final EntityResponse entityResponse) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { 
final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); @@ -81,20 +87,22 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { result.setOwnership( - OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { result.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + GlossaryTermsMapper.map( + context, new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedInstitutionalMemory = @@ -102,17 +110,20 @@ public static Container map(final EntityResponse entityResponse) { if (envelopedInstitutionalMemory != null) { result.setInstitutionalMemory( InstitutionalMemoryMapper.map( - new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + context, + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); if (statusAspect != null) { - result.setStatus(StatusMapper.map(new Status(statusAspect.getValue().data()))); + result.setStatus(StatusMapper.map(context, new Status(statusAspect.getValue().data()))); } final EnvelopedAspect 
envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes( + SubTypesMapper.map(context, new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); @@ -130,13 +141,26 @@ public static Container map(final EntityResponse entityResponse) { if (envelopedDomains != null) { final Domains domains = new Domains(envelopedDomains.getValue().data()); // Currently we only take the first domain if it exists. - result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); + result.setDomain(DomainAssociationMapper.map(context, domains, entityUrn.toString())); } final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { result.setDeprecation( - DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + DeprecationMapper.map(context, new Deprecation(envelopedDeprecation.getValue().data()))); + } + + final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); + if (envelopedStructuredProps != null) { + result.setStructuredProperties( + StructuredPropertiesMapper.map( + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); + } + + final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); + if (envelopedForms != null) { + result.setForms( + FormsMapper.map(new Forms(envelopedForms.getValue().data()), entityUrn.toString())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 371cf6b280c20e..9fa2a71251c6d1 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -7,6 +7,7 @@ import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.url.Url; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; @@ -30,7 +31,6 @@ import com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -88,7 +88,7 @@ public List> batchLoad( null, context.getAuthentication()); - final List results = new ArrayList<>(); + final List results = new ArrayList<>(urns.size()); for (Urn urn : corpGroupUrns) { results.add(corpGroupMap.getOrDefault(urn, null)); } @@ -98,7 +98,7 @@ public List> batchLoad( gmsCorpGroup == null ? 
null : DataFetcherResult.newResult() - .data(CorpGroupMapper.map(gmsCorpGroup)) + .data(CorpGroupMapper.map(context, gmsCorpGroup)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -116,14 +116,13 @@ public SearchResults search( throws Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "corpGroup", query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -135,8 +134,9 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete( + context.getOperationContext(), "corpGroup", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -189,7 +189,7 @@ private boolean isAuthorizedToUpdate( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); @@ -232,6 +232,9 @@ private RecordTemplate mapCorpGroupEditableInfo( if (input.getEmail() != null) { result.setEmail(input.getEmail()); } + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index a6e14535cf0b7f..ed22bb06fd5c26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -1,9 +1,14 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; import com.linkedin.data.template.GetMode; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -14,21 +19,33 @@ public class CorpGroupEditablePropertiesMapper implements ModelMapper< com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { + private final Logger _logger = + LoggerFactory.getLogger(CorpGroupEditablePropertiesMapper.class.getName()); + public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper(); public static CorpGroupEditableProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { - return INSTANCE.apply(corpGroupEditableInfo); + return INSTANCE.apply(context, corpGroupEditableInfo); } @Override public CorpGroupEditableProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT)); result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT)); + com.linkedin.common.url.Url pictureLinkObject = + corpGroupEditableInfo.getPictureLink(GetMode.NULL); + String pictureLink = null; + if (pictureLinkObject != null) { + pictureLink = pictureLinkObject.toString(); + } + result.setPictureLink(pictureLink); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 04d0cc8ce94e63..918d7f19b99f12 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; 
+import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -17,12 +19,14 @@ public class CorpGroupInfoMapper public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); public static CorpGroupInfo map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); + return INSTANCE.apply(context, corpGroupInfo); } @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + public CorpGroupInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { final CorpGroupInfo result = new CorpGroupInfo(); result.setEmail(info.getEmail()); result.setDescription(info.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 52e200d19923a9..6246cf64bbf7f8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -2,21 +2,27 @@ import static com.linkedin.metadata.Constants.*; +import com.linkedin.common.Forms; import com.linkedin.common.Origin; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; 
import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.key.CorpGroupKey; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -27,12 +33,14 @@ public class CorpGroupMapper implements ModelMapper { public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static CorpGroup map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { + public CorpGroup apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final CorpGroup result = new CorpGroup(); Urn entityUrn = entityResponse.getUrn(); @@ -41,10 +49,21 @@ public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - 
mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult(context, CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); mappingHelper.mapToResult( - OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); + context, CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (entity, dataMap) -> this.mapOwnership(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); } else { @@ -61,20 +80,25 @@ private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap data corpGroup.setName(corpGroupKey.getName()); } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + private void mapCorpGroupInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(context, corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(context, corpGroupInfo)); } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + private void mapCorpGroupEditableInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { corpGroup.setEditableProperties( - 
CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); + CorpGroupEditablePropertiesMapper.map(context, new CorpGroupEditableInfo(dataMap))); } private void mapOwnership( - @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); + @Nullable QueryContext context, + @Nonnull CorpGroup corpGroup, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn)); } private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 29d0482863971c..3feef06b6cbb0b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -16,12 +18,14 @@ public class CorpGroupPropertiesMapper public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); public static CorpGroupProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); + return INSTANCE.apply(context, corpGroupInfo); } @Override - public CorpGroupProperties apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + public CorpGroupProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { final CorpGroupProperties result = new CorpGroupProperties(); result.setEmail(info.getEmail()); result.setDescription(info.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index 5749eef970fce8..7b4f2bcf00d979 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -32,7 +32,6 @@ import com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -88,7 +87,7 @@ public List> batchLoad( null, context.getAuthentication()); - final List results = new ArrayList<>(); + final List results = new ArrayList<>(urns.size()); for (Urn urn : corpUserUrns) { results.add(corpUserMap.getOrDefault(urn, null)); } @@ -98,7 +97,7 @@ public List> batchLoad( gmsCorpUser == null ? 
null : DataFetcherResult.newResult() - .data(CorpUserMapper.map(gmsCorpUser, _featureFlags)) + .data(CorpUserMapper.map(context, gmsCorpUser, _featureFlags)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -116,14 +115,13 @@ public SearchResults search( throws Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "corpuser", query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -135,8 +133,9 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete( + context.getOperationContext(), "corpuser", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } public Class inputClass() { @@ -181,7 +180,7 @@ private boolean isAuthorizedToUpdate( return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 3ee353293393e7..1ff2f069b8112c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,12 +17,14 @@ public class CorpUserEditableInfoMapper public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); public static CorpUserEditableProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); + return INSTANCE.apply(context, info); } @Override public CorpUserEditableProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { final CorpUserEditableProperties result = new CorpUserEditableProperties(); result.setDisplayName(info.getDisplayName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 9044f4d510bcf9..a728ea3695b508 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,12 +17,15 @@ public class CorpUserInfoMapper public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); + public static CorpUserInfo map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(context, corpUserInfo); } @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + public CorpUserInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { final CorpUserInfo result = new CorpUserInfo(); result.setActive(info.isActive()); result.setCountryCode(info.getCountryCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index 98783131a2d521..4fa278983399b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -2,10 +2,12 @@ import static com.linkedin.metadata.Constants.*; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.CorpUser; import 
com.linkedin.datahub.graphql.generated.CorpUserAppearanceSettings; @@ -15,6 +17,8 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -25,6 +29,7 @@ import com.linkedin.identity.CorpUserSettings; import com.linkedin.identity.CorpUserStatus; import com.linkedin.metadata.key.CorpUserKey; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -37,17 +42,22 @@ public class CorpUserMapper { public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); + public static CorpUser map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse, null); } public static CorpUser map( - @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(context, entityResponse, featureFlags); } public CorpUser apply( - @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { final CorpUser result = new CorpUser(); Urn entityUrn = entityResponse.getUrn(); @@ 
-58,21 +68,31 @@ public CorpUser apply( mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); mappingHelper.mapToResult( CORP_USER_INFO_ASPECT_NAME, - (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); + (corpUser, dataMap) -> this.mapCorpUserInfo(context, corpUser, dataMap, entityUrn)); mappingHelper.mapToResult( CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> corpUser.setEditableProperties( - CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); + CorpUserEditableInfoMapper.map(context, new CorpUserEditableInfo(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + corpUser.setGlobalTags( + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( CORP_USER_STATUS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + corpUser.setStatus(CorpUserStatusMapper.map(context, new CorpUserStatus(dataMap)))); mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); mapCorpUserSettings( result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); @@ -138,10 +158,13 @@ private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap } private void mapCorpUserInfo( - @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull CorpUser corpUser, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { 
CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); + corpUser.setProperties(CorpUserPropertiesMapper.map(context, corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(context, corpUserInfo)); CorpUserProperties corpUserProperties = corpUser.getProperties(); if (corpUserInfo.hasCustomProperties()) { corpUserProperties.setCustomProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index 106e3de6612015..738ae68cd756d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class CorpUserPropertiesMapper @@ -12,12 +14,14 @@ public class CorpUserPropertiesMapper public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); public static CorpUserProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); + return INSTANCE.apply(context, corpUserInfo); } @Override - public CorpUserProperties apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + public CorpUserProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { final CorpUserProperties result = new CorpUserProperties(); result.setActive(info.isActive()); result.setCountryCode(info.getCountryCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index dd9e465a2d4ea9..eb31754a9f0f0a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class CorpUserStatusMapper implements ModelMapper { @@ -10,12 +12,14 @@ public class CorpUserStatusMapper public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); public static CorpUserStatus map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { - return 
INSTANCE.apply(corpUserStatus); + return INSTANCE.apply(context, corpUserStatus); } @Override - public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus status) { + public CorpUserStatus apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserStatus status) { // Warning- if the backend provides an unexpected value this will fail. return CorpUserStatus.valueOf(status.getStatus()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index d01f9b3945dc34..3d53c1e474b9da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -81,7 +80,9 @@ public class DashboardType SUB_TYPES_ASPECT_NAME, EMBED_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); + BROWSE_PATHS_V2_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); private final EntityClient _entityClient; @@ -122,7 +123,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dashboardMap.getOrDefault(urn, null)); } @@ -132,7 +133,7 @@ public List> batchLoad( gmsDashboard == null ? 
null : DataFetcherResult.newResult() - .data(DashboardMapper.map(gmsDashboard)) + .data(DashboardMapper.map(context, gmsDashboard)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -151,14 +152,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dashboard", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -170,8 +170,9 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete( + context.getOperationContext(), "dashboard", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -187,8 +188,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dashboard", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dashboard", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -196,7 +202,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { @@ -213,10 +219,9 @@ public Dashboard update( @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DashboardUpdateInputMapper.map(input, actor); + DashboardUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -237,7 +242,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 704d2ae308c1ae..4fa52b11365641 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InputFields; @@ -16,6 +18,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; @@ -39,8 +43,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import 
com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -48,19 +54,23 @@ import com.linkedin.metadata.key.DashboardKey; import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardMapper implements ModelMapper { public static final DashboardMapper INSTANCE = new DashboardMapper(); - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Dashboard map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { + public Dashboard apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Dashboard result = new Dashboard(); Urn entityUrn = entityResponse.getUrn(); @@ -74,55 +84,75 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); mappingHelper.mapToResult( DASHBOARD_INFO_ASPECT_NAME, - (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + (entity, dataMap) -> this.mapDashboardInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dashboard.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + (dashboard, dataMap) -> + 
dashboard.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DashboardMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DashboardMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dashboard.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); + dashboard.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, 
dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); mappingHelper.mapToResult( EMBED_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + dashboard.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dashboard, dataMap) -> + dashboard.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dashboard.class); + } else { + return mappingHelper.getResult(); + } } private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { @@ -141,18 +171,24 @@ private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap data } private void mapDashboardInfo( - @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + @Nonnull QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + Urn entityUrn) { final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + dashboard.setInfo(mapInfo(context, 
gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(context, gmsDashboardInfo, entityUrn)); } /** * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link * DashboardInfo} */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + private static DashboardInfo mapInfo( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn entityUrn) { final DashboardInfo result = new DashboardInfo(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -178,10 +214,10 @@ private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, U if (info.hasAccess()) { result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } return result; } @@ -190,8 +226,10 @@ private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, U * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link * DashboardProperties} */ - private DashboardProperties mapDashboardInfoToProperties( - final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + private static DashboardProperties mapDashboardInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn entityUrn) { final DashboardProperties result = new DashboardProperties(); 
result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -209,10 +247,10 @@ private DashboardProperties mapDashboardInfoToProperties( if (info.hasAccess()) { result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } return result; } @@ -227,15 +265,21 @@ private void mapEditableDashboardProperties( dashboard.setEditableProperties(dashboardEditableProperties); } - private void mapGlobalTags( - @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dashboard.setGlobalTags(globalTags); dashboard.setTags(globalTags); } - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dashboard.setContainer( @@ -245,8 +289,11 @@ private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMa .build()); } - private void 
mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + dashboard.setDomain(DomainAssociationMapper.map(context, domains, dashboard.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index 6212663ee87e4c..d004fb70d41052 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardUpdateInputMapper implements InputModelMapper, Urn> { public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); public static Collection map( - @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput 
dashboardUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dashboardUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput dashboardUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); @@ -41,7 +47,7 @@ public Collection apply( if (dashboardUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,14 +57,14 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dashboardUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } else { // Tags override global tags globalTags.setTags( new TagAssociationArray( dashboardUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index 782ec3d3a6c073..a5abb57672b42b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -1,21 +1,25 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); - public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + public static DashboardUsageMetrics map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override - public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { + public DashboardUsageMetrics apply( + @Nullable QueryContext context, EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 6ec1979cd090d8..a3a631d450254b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import 
com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -76,7 +75,9 @@ public class DataFlowType DEPRECATION_ASPECT_NAME, DATA_PLATFORM_INSTANCE_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); + BROWSE_PATHS_V2_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); private final EntityClient _entityClient; @@ -116,7 +117,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dataFlowMap.getOrDefault(urn, null)); } @@ -126,7 +127,7 @@ public List> batchLoad( gmsDataFlow == null ? null : DataFetcherResult.newResult() - .data(DataFlowMapper.map(gmsDataFlow)) + .data(DataFlowMapper.map(context, gmsDataFlow)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -145,14 +146,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dataFlow", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -164,8 +164,9 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + 
_entityClient.autoComplete( + context.getOperationContext(), "dataFlow", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -181,8 +182,13 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataFlow", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -191,7 +197,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c final StringArray result = _entityClient.getBrowsePaths( DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -200,10 +206,9 @@ public DataFlow update( throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DataFlowUpdateInputMapper.map(input, actor); + DataFlowUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -224,7 +229,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 165fae81527ab8..9e2612f60abda1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -12,6 +14,8 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties; import com.linkedin.datahub.graphql.generated.DataFlowInfo; @@ -28,8 +32,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datajob.EditableDataFlowProperties; 
import com.linkedin.domain.Domains; @@ -38,18 +44,22 @@ import com.linkedin.metadata.key.DataFlowKey; import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowMapper implements ModelMapper { public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataFlow map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + public DataFlow apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataFlow result = new DataFlow(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATA_FLOW); @@ -68,39 +78,53 @@ public DataFlow apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataFlow.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + (dataFlow, dataMap) -> mapGlobalTags(context, dataFlow, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> dataFlow.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new 
InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> dataFlow.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DataFlowMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataFlow.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); + dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), DataFlow.class); + } else { + return mappingHelper.getResult(); + } } private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap 
dataMap) { @@ -170,17 +194,21 @@ private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataFlow.setEditableProperties(dataFlowEditableProperties); } - private void mapGlobalTags( - @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull DataFlow dataFlow, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataFlow.setGlobalTags(globalTags); dataFlow.setTags(globalTags); } - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + dataFlow.setDomain(DomainAssociationMapper.map(context, domains, dataFlow.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index 87579a15d586e2..cb9b6f66c6eabf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowUpdateInputMapper implements InputModelMapper, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); public static Collection map( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dataFlowUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dataFlowUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final 
DataFlowUpdateInput dataFlowUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -40,7 +46,7 @@ public Collection apply( if (dataFlowUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,13 +56,13 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dataFlowUpdateInput.getGlobalTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( dataFlowUpdateInput.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index 6e71584007504b..0f69724e1e4301 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -78,7 +77,9 @@ public 
class DataJobType DATA_PLATFORM_INSTANCE_ASPECT_NAME, DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME); + SUB_TYPES_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("flow"); private final EntityClient _entityClient; @@ -111,6 +112,7 @@ public List> batchLoad( final List urnStrs, @Nonnull final QueryContext context) throws Exception { final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + final Map dataJobMap = _entityClient.batchGetV2( Constants.DATA_JOB_ENTITY_NAME, @@ -118,7 +120,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dataJobMap.getOrDefault(urn, null)); } @@ -128,7 +130,7 @@ public List> batchLoad( gmsDataJob == null ? null : DataFetcherResult.newResult() - .data(DataJobMapper.map(gmsDataJob)) + .data(DataJobMapper.map(context, gmsDataJob)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -147,14 +149,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dataJob", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -166,8 +167,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + 
_entityClient.autoComplete(context.getOperationContext(), "dataJob", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -183,8 +184,13 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "dataJob", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataJob", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -192,7 +198,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -200,10 +206,9 @@ public DataJob update( @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DataJobUpdateInputMapper.map(input, actor); + DataJobUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -224,7 +229,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 0d0e7a613c8d81..d7da875bc2a29f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.google.common.collect.ImmutableList; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -14,6 +16,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataJobEditableProperties; @@ -33,27 +37,33 @@ import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import 
com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datajob.EditableDataJobProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.key.DataJobKey; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataJobMapper implements ModelMapper { public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataJob map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { + public DataJob apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataJob result = new DataJob(); Urn entityUrn = entityResponse.getUrn(); @@ -71,8 +81,10 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { DataMap data = aspect.getValue().data(); if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow( - new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + if (context == null || canView(context.getOperationContext(), gmsKey.getFlow())) { + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + } result.setJobId(gmsKey.getJobId()); } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = @@ -92,37 +104,48 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { editableDataJobProperties.getDescription()); 
result.setEditableProperties(dataJobEditableProperties); } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { - result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); + result.setOwnership(OwnershipMapper.map(context, new Ownership(data), entityUrn)); } else if (STATUS_ASPECT_NAME.equals(name)) { - result.setStatus(StatusMapper.map(new Status(data))); + result.setStatus(StatusMapper.map(context, new Status(data))); } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { result.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(data), entityUrn)); } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { result.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + GlossaryTermsMapper.map(context, new GlossaryTerms(data), entityUrn)); } else if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. 
- result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); + result.setDomain( + DomainAssociationMapper.map(context, domains, entityUrn.toString())); } else if (DEPRECATION_ASPECT_NAME.equals(name)) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); + result.setDeprecation(DeprecationMapper.map(context, new Deprecation(data))); } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { - result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); + result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data))); } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { - result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); + result.setSubTypes(SubTypesMapper.map(context, new SubTypes(data))); + } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(name)) { + result.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(data))); + } else if (FORMS_ASPECT_NAME.equals(name)) { + result.setForms(FormsMapper.map(new Forms(data), entityUrn.toString())); } }); - return result; + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataJob.class); + } else { + return result; + } } /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b0f299e00b4bae..a1d0123d3a5211 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataJobUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataJobUpdateInputMapper implements InputModelMapper, Urn> { public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); public static Collection map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dataJobUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); @@ -41,7 +47,7 @@ public Collection apply( if (dataJobUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, 
dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,13 +57,13 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dataJobUpdateInput.getGlobalTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( dataJobUpdateInput.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 567d275dbee0a3..7e939719a3ec4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -58,7 +58,7 @@ public List> batchLoad( gmsPlatform == null ? 
null : DataFetcherResult.newResult() - .data(DataPlatformMapper.map(gmsPlatform)) + .data(DataPlatformMapper.map(context, gmsPlatform)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index c2dc3bfabd07c8..a7c765f5dcbf68 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformInfo; import com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Deprecated public class DataPlatformInfoMapper @@ -12,12 +14,15 @@ public class DataPlatformInfoMapper public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); public static DataPlatformInfo map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + return INSTANCE.apply(context, platform); } @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + public DataPlatformInfo apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { final DataPlatformInfo result = new DataPlatformInfo(); result.setType(PlatformType.valueOf(input.getType().toString())); result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index f7078f9f37d7c6..df3fc7fb6434e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -14,17 +15,20 @@ import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformMapper implements ModelMapper { public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); + public static DataPlatform map( + @Nullable QueryContext context, @Nonnull final EntityResponse platform) { + return INSTANCE.apply(context, platform); } @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { + public DataPlatform apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataPlatform result = new DataPlatform(); final DataPlatformKey dataPlatformKey = (DataPlatformKey) @@ -48,7 +52,7 @@ public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> dataPlatform.setProperties( - DataPlatformPropertiesMapper.map(new 
DataPlatformInfo(dataMap)))); + DataPlatformPropertiesMapper.map(context, new DataPlatformInfo(dataMap)))); return mappingHelper.getResult(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index ad6de5505bed6b..0043ad65ee5db9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformProperties; import com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformPropertiesMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class DataPlatformPropertiesMapper public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); public static DataPlatformProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + return INSTANCE.apply(context, platform); } @Override public DataPlatformProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { final DataPlatformProperties result = new DataPlatformProperties(); result.setType(PlatformType.valueOf(input.getType().toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 6519a493f39917..36399ddf784d98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -90,7 +90,7 @@ public List> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) + .data(DataPlatformInstanceMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); @@ -121,7 +121,11 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), + DATA_PLATFORM_INSTANCE_ENTITY_NAME, + query, + filters, + limit); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index 1a2bd0488c4bd6..ed9bf0c82d869b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import 
com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; @@ -23,16 +24,19 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformInstanceMapper { public static final DataPlatformInstanceMapper INSTANCE = new DataPlatformInstanceMapper(); - public static DataPlatformInstance map(final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataPlatformInstance map( + @Nullable QueryContext context, final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } - public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) { + public DataPlatformInstance apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataPlatformInstance result = new DataPlatformInstance(); final Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityUrn.toString()); @@ -50,24 +54,26 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) Constants.OWNERSHIP_ASPECT_NAME, (dataPlatformInstance, dataMap) -> dataPlatformInstance.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( Constants.GLOBAL_TAGS_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + this.mapGlobalTags(context, dataPlatformInstance, dataMap, entityUrn)); mappingHelper.mapToResult( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataPlatformInstance, dataMap) -> dataPlatformInstance.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( 
Constants.STATUS_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + dataPlatformInstance.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( Constants.DEPRECATION_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataPlatformInstance.setDeprecation( + DeprecationMapper.map(context, new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -103,12 +109,13 @@ private void mapDataPlatformInstanceProperties( dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags( + private static void mapGlobalTags( + @Nullable QueryContext context, @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index 48a0cb984862d3..7a4d342281fe54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -4,6 +4,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.EntityType; import 
com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; @@ -13,6 +14,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -23,12 +25,14 @@ public class DataProcessInstanceMapper implements ModelMapper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( - DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + context, DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); return mappingHelper.getResult(); } private void mapDataProcessProperties( - @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + @Nonnull QueryContext context, @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); dpi.setName(dataProcessInstanceProperties.getName()); if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); + dpi.setCreated(AuditStampMapper.map(context, dataProcessInstanceProperties.getCreated())); } if (dataProcessInstanceProperties.hasExternalUrl()) { dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index fd60711e8c569d..3c8639c07c0360 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataprocess.DataProcessInstanceRunEvent; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProcessInstanceRunEventMapper implements TimeSeriesAspectMapper { @@ -13,13 +15,13 @@ public class DataProcessInstanceRunEventMapper new DataProcessInstanceRunEventMapper(); public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { DataProcessInstanceRunEvent runEvent = GenericRecordUtils.deserializeAspect( @@ -38,7 +40,10 @@ public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( runEvent.getStatus().toString())); } if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); + result.setResult(DataProcessInstanceRunResultMapper.map(context, runEvent.getResult())); + } + if (runEvent.hasDurationMillis()) { + result.setDurationMillis(runEvent.getDurationMillis()); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 422bea73925a85..7026856503a0bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResultType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProcessInstanceRunResultMapper implements ModelMapper< @@ -14,13 +16,13 @@ public class DataProcessInstanceRunResultMapper new DataProcessInstanceRunResultMapper(); public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( - @Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(context, input); } @Override public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( - @Nonnull final DataProcessInstanceRunResult input) { + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index 
766f6937ce3e29..6689ddf56afe44 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -3,10 +3,12 @@ import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -49,7 +51,9 @@ public class DataProductType GLOBAL_TAGS_ASPECT_NAME, GLOSSARY_TERMS_ASPECT_NAME, DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME); + INSTITUTIONAL_MEMORY_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -81,7 +85,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : dataProductUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -91,7 +95,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataProductMapper.map(gmsResult)) + .data(DataProductMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -109,8 +113,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), DATA_PRODUCT_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 8039ea08dc722c..08637dbfd01edc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,12 +1,16 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import 
com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -14,6 +18,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; @@ -21,25 +27,31 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProductMapper implements ModelMapper { public static final DataProductMapper INSTANCE = new DataProductMapper(); - public static DataProduct map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataProduct map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataProduct apply(@Nonnull final EntityResponse entityResponse) { + public DataProduct apply( + @Nullable 
final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataProduct result = new DataProduct(); Urn entityUrn = entityResponse.getUrn(); @@ -54,28 +66,43 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + dataProduct.setTags(GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult( DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setDomain( - DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + DomainAssociationMapper.map(context, new Domains(dataMap), dataProduct.getUrn()))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataProduct.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return result; + if (context != null && 
!canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataProduct.class); + } else { + return result; + } } private void mapDataProductProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index badb24810c82bf..30d03d4b5c3564 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -40,7 +40,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -88,15 +87,17 @@ public class DatasetType DATA_PRODUCTS_ASPECT_NAME, BROWSE_PATHS_V2_ASPECT_NAME, ACCESS_DATASET_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME, SUB_TYPES_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; - private final EntityClient _entityClient; + private final EntityClient entityClient; public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; + this.entityClient = entityClient; } @Override @@ -131,13 +132,13 @@ public List> batchLoad( final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final Map datasetMap = - _entityClient.batchGetV2( + entityClient.batchGetV2( Constants.DATASET_ENTITY_NAME, new HashSet<>(urns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new 
ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(datasetMap.getOrDefault(urn, null)); } @@ -147,7 +148,7 @@ public List> batchLoad( gmsDataset == null ? null : DataFetcherResult.newResult() - .data(DatasetMapper.map(gmsDataset)) + .data(DatasetMapper.map(context, gmsDataset)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -165,15 +166,14 @@ public SearchResults search( throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = - _entityClient.search( + entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -185,8 +185,9 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -201,23 +202,28 @@ public BrowseResults browse( final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = - _entityClient.browse( - "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataset", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { final StringArray result = - _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(context, result); } @Override public List batchUpdate( @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Urn actor = Urn.createFromString(context.getActorUrn()); final Collection proposals = Arrays.stream(input) @@ -225,7 +231,7 @@ public List batchUpdate( updateInput -> { if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { Collection datasetProposals = - DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + DatasetUpdateInputMapper.map(context, updateInput.getUpdate(), actor); datasetProposals.forEach( proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); return datasetProposals; @@ -240,7 +246,7 @@ public List batchUpdate( Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw 
new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } @@ -255,14 +261,13 @@ public Dataset update( @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DatasetUpdateInputMapper.map(input, actor); + DatasetUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } @@ -279,7 +284,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index df019cc5df8fed..a328e31ba76085 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -94,7 +94,7 @@ public List> batchLoad( gmsDataset == null ? 
null : DataFetcherResult.newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) + .data(VersionedDatasetMapper.map(context, gmsDataset)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 5fe7815ea2f8d3..e63335beef9c14 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.assertion.AssertionRunEvent; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AssertionResult; import com.linkedin.datahub.graphql.generated.AssertionResultType; import com.linkedin.datahub.graphql.generated.AssertionRunStatus; @@ -12,6 +13,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AssertionRunEventMapper implements TimeSeriesAspectMapper { @@ -19,13 +21,13 @@ public class AssertionRunEventMapper public static final AssertionRunEventMapper INSTANCE = new AssertionRunEventMapper(); public static com.linkedin.datahub.graphql.generated.AssertionRunEvent map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, 
@Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = GenericRecordUtils.deserializeAspect( @@ -43,17 +45,17 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setStatus( AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { - assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); + assertionRunEvent.setBatchSpec(mapBatchSpec(context, gmsAssertionRunEvent.getBatchSpec())); } if (gmsAssertionRunEvent.hasPartitionSpec()) { assertionRunEvent.setPartitionSpec(mapPartitionSpec(gmsAssertionRunEvent.getPartitionSpec())); } if (gmsAssertionRunEvent.hasResult()) { - assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); + assertionRunEvent.setResult(mapAssertionResult(context, gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { assertionRunEvent.setRuntimeContext( - StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + StringMapMapper.map(context, gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; @@ -66,7 +68,8 @@ private PartitionSpec mapPartitionSpec(com.linkedin.timeseries.PartitionSpec gms return partitionSpec; } - private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResult gmsResult) { + private AssertionResult mapAssertionResult( + @Nullable QueryContext context, com.linkedin.assertion.AssertionResult gmsResult) { AssertionResult datasetAssertionResult = new AssertionResult(); datasetAssertionResult.setRowCount(gmsResult.getRowCount()); datasetAssertionResult.setActualAggValue(gmsResult.getActualAggValue()); @@ -79,18 +82,20 @@ private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResul } if (gmsResult.hasNativeResults()) { - datasetAssertionResult.setNativeResults(StringMapMapper.map(gmsResult.getNativeResults())); + datasetAssertionResult.setNativeResults( + 
StringMapMapper.map(context, gmsResult.getNativeResults())); } return datasetAssertionResult; } - private BatchSpec mapBatchSpec(com.linkedin.assertion.BatchSpec gmsBatchSpec) { + private BatchSpec mapBatchSpec( + @Nullable QueryContext context, com.linkedin.assertion.BatchSpec gmsBatchSpec) { BatchSpec batchSpec = new BatchSpec(); batchSpec.setNativeBatchId(gmsBatchSpec.getNativeBatchId()); batchSpec.setLimit(gmsBatchSpec.getLimit()); batchSpec.setQuery(gmsBatchSpec.getQuery()); - batchSpec.setCustomProperties(StringMapMapper.map(gmsBatchSpec.getCustomProperties())); + batchSpec.setCustomProperties(StringMapMapper.map(context, gmsBatchSpec.getCustomProperties())); return batchSpec; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1644e0243a1812..a4b076f8c8bf22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetDeprecationMapper implements ModelMapper { @@ -10,12 +12,15 @@ public class DatasetDeprecationMapper public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); public static Deprecation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); + return INSTANCE.apply(context, deprecation); } @Override - public 
Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + public Deprecation apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataset.DatasetDeprecation input) { final Deprecation result = new Deprecation(); result.setActor(input.getActor().toString()); result.setDeprecated(input.isDeprecated()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java new file mode 100644 index 00000000000000..de715f28ef7833 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java @@ -0,0 +1,28 @@ +package com.linkedin.datahub.graphql.types.dataset.mappers; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DatasetFilter; +import com.linkedin.datahub.graphql.generated.DatasetFilterType; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DatasetFilterMapper + implements ModelMapper { + + public static final DatasetFilterMapper INSTANCE = new DatasetFilterMapper(); + + public static DatasetFilter map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter metadata) { + return INSTANCE.apply(context, metadata); + } + + @Override + public DatasetFilter apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter input) { + final DatasetFilter result = new DatasetFilter(); + result.setType(DatasetFilterType.valueOf(input.getType().name())); + result.setSql(input.getSql()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 7fa1decdf7f552..89d5aa8621bf08 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Access; @@ -7,6 +8,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -17,6 +19,8 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; @@ -38,9 +42,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import 
com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,7 +59,9 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -66,11 +74,17 @@ public class DatasetMapper implements ModelMapper { public static final DatasetMapper INSTANCE = new DatasetMapper(); - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); + public static Dataset map( + @Nullable final QueryContext context, @Nonnull final EntityResponse dataset) { + return INSTANCE.apply(context, dataset); } public Dataset apply(@Nonnull final EntityResponse entityResponse) { + return apply(null, entityResponse); + } + + public Dataset apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { Dataset result = new Dataset(); Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityResponse.getUrn().toString()); @@ -88,11 +102,12 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + dataset.setDeprecation( + DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); mappingHelper.mapToResult( SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); + dataset.setSchema(SchemaMapper.map(context, new SchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, 
this::mapViewProperties); @@ -100,41 +115,44 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setEditableSchemaMetadata( - EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> dataset.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DatasetMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DatasetMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - 
dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( SIBLINGS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + (dataset, dataMap) -> + dataset.setSiblings(SiblingsMapper.map(context, new Siblings(dataMap)))); mappingHelper.mapToResult( UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> @@ -142,19 +160,34 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); mappingHelper.mapToResult( EMBED_ASPECT_NAME, - (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> - dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, dataMap) -> 
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dataset.class); + } else { + return mappingHelper.getResult(); + } } private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { @@ -225,18 +258,23 @@ private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMa graphqlProperties.setMaterialized(properties.isMaterialized()); graphqlProperties.setLanguage(properties.getViewLanguage()); graphqlProperties.setLogic(properties.getViewLogic()); + graphqlProperties.setFormattedLogic(properties.getFormattedViewLogic()); dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags( - @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dataset dataset, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataset.setGlobalTags(globalTags); dataset.setTags(globalTags); } - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Dataset dataset, @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dataset.setContainer( @@ -246,8 +284,9 @@ private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { .build()); } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + private 
static void mapDomains( + @Nullable final QueryContext context, @Nonnull Dataset dataset, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index 25639e431fac12..e966993871d067 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataset.DatasetFieldProfile; import com.linkedin.dataset.DatasetProfile; @@ -7,6 +8,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetProfileMapper implements TimeSeriesAspectMapper { @@ -14,13 +16,13 @@ public class DatasetProfileMapper public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); public static com.linkedin.datahub.graphql.generated.DatasetProfile map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.DatasetProfile apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull 
final EnvelopedAspect envelopedAspect) { DatasetProfile gmsProfile = GenericRecordUtils.deserializeAspect( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 0b05d420030b5e..122298bcab6547 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; @@ -23,6 +24,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetUpdateInputMapper implements InputModelMapper, Urn> { @@ -30,13 +32,17 @@ public class DatasetUpdateInputMapper public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(datasetUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DatasetUpdateInput datasetUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, datasetUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DatasetUpdateInput 
datasetUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,7 +52,7 @@ public Collection apply( if (datasetUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -65,7 +71,8 @@ public Collection apply( if (datasetUpdateInput.getInstitutionalMemory() != null) { proposals.add( updateMappingHelper.aspectToProposal( - InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + InstitutionalMemoryUpdateMapper.map( + context, datasetUpdateInput.getInstitutionalMemory()), INSTITUTIONAL_MEMORY_ASPECT_NAME)); } @@ -75,14 +82,14 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( datasetUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field globalTags.setTags( new TagAssociationArray( datasetUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); @@ -93,7 +100,7 @@ public Collection apply( editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() - .map(element -> mapSchemaFieldInfo(element)) + .map(element -> mapSchemaFieldInfo(context, element)) .collect(Collectors.toList()))); 
editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); @@ -117,6 +124,7 @@ public Collection apply( } private EditableSchemaFieldInfo mapSchemaFieldInfo( + @Nullable QueryContext context, final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); @@ -130,7 +138,7 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( globalTags.setTags( new TagAssociationArray( schemaFieldInfo.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index f54adbe8ba26c6..15ba9d025ec85b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -1,22 +1,28 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EditableSchemaFieldInfoMapper { public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final 
EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(fieldInfo, entityUrn); + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo fieldInfo, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, fieldInfo, entityUrn); } public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); if (input.hasDescription()) { @@ -26,11 +32,12 @@ public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( result.setFieldPath((input.getFieldPath())); } if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); } if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 3cf012a523d544..1c1e77f66a1ece 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,26 +1,32 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EditableSchemaMetadataMapper { public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( - @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); result.setEditableSchemaFieldInfo( input.getEditableSchemaFieldInfo().stream() - .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .map(schemaField -> EditableSchemaFieldInfoMapper.map(context, schemaField, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b99b243da5b94a..56ec8de758857a 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -1,43 +1,48 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { private ForeignKeyConstraintMapper() {} - public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { + public static ForeignKeyConstraint map( + @Nullable QueryContext context, com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); result.setName(constraint.getName()); if (constraint.hasForeignDataset()) { - result.setForeignDataset((Dataset) UrnToEntityMapper.map(constraint.getForeignDataset())); + result.setForeignDataset( + (Dataset) UrnToEntityMapper.map(context, constraint.getForeignDataset())); } if (constraint.hasSourceFields()) { result.setSourceFields( constraint.getSourceFields().stream() - .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( constraint.getForeignFields().stream() - .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) .collect(Collectors.toList())); } return result; } - private static 
SchemaFieldEntity mapSchemaFieldEntity(Urn schemaFieldUrn) { + private static SchemaFieldEntity mapSchemaFieldEntity( + @Nullable QueryContext context, Urn schemaFieldUrn) { SchemaFieldEntity result = new SchemaFieldEntity(); try { Urn resourceUrn = Urn.createFromString(schemaFieldUrn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(resourceUrn)); + result.setParent(UrnToEntityMapper.map(context, resourceUrn)); } catch (Exception e) { throw new RuntimeException("Error converting schemaField parent urn string to Urn", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index dd345bebf657f9..28096f30d1817d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.KeyValueSchema; import com.linkedin.datahub.graphql.generated.PlatformSchema; import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class PlatformSchemaMapper implements ModelMapper { public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); + public static PlatformSchema map( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(context, metadata); } @Override 
- public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + public PlatformSchema apply( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema input) { Object result; if (input.isSchemaless()) { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f53803ce5be855..a2cc9d5a66edd9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -1,23 +1,32 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.metadata.utils.SchemaFieldUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaFieldMapper { public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); public static SchemaField map( - @Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField metadata, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public SchemaField apply( - @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull 
Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField input, + @Nonnull Urn entityUrn) { final SchemaField result = new SchemaField(); result.setDescription(input.getDescription()); result.setFieldPath(input.getFieldPath()); @@ -28,14 +37,17 @@ public SchemaField apply( result.setType(mapSchemaFieldDataType(input.getType())); result.setLabel(input.getLabel()); if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); } if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } result.setIsPartOfKey(input.isIsPartOfKey()); result.setIsPartitioningKey(input.isIsPartitioningKey()); + result.setJsonProps(input.getJsonProps()); + result.setSchemaFieldEntity(this.createSchemaFieldEntity(input, entityUrn)); return result; } @@ -74,4 +86,14 @@ private SchemaFieldDataType mapSchemaFieldDataType( "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } } + + private SchemaFieldEntity createSchemaFieldEntity( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + SchemaFieldEntity schemaFieldEntity = new SchemaFieldEntity(); + schemaFieldEntity.setUrn( + SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath()) + .toString()); + schemaFieldEntity.setType(EntityType.SCHEMA_FIELD); + return schemaFieldEntity; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index d0424ba89eca1c..fd089184fb1c4b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; @@ -12,18 +13,23 @@ public class SchemaMapper { public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); + public static Schema map( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, null, entityUrn); } public static Schema map( + @Nullable QueryContext context, @Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); + return INSTANCE.apply(context, metadata, systemMetadata, entityUrn); } public Schema apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { @@ -42,13 +48,13 @@ public Schema apply( result.setPrimaryKeys(input.getPrimaryKeys()); result.setFields( input.getFields().stream() - .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) .collect(Collectors.toList())); - 
result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); if (input.getForeignKeys() != null) { result.setForeignKeys( input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) + .map(fk -> ForeignKeyConstraintMapper.map(context, fk)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 31381073a16dd0..327cae3bae11f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -1,23 +1,37 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.schema.SchemaMetadata; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaMetadataMapper { public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(aspect, entityUrn); + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); } public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn 
entityUrn) { final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + return apply(context, input, entityUrn, aspect.getVersion()); + } + + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata input, + final Urn entityUrn, + final long version) { final com.linkedin.datahub.graphql.generated.SchemaMetadata result = new com.linkedin.datahub.graphql.generated.SchemaMetadata(); @@ -32,14 +46,16 @@ public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( result.setPrimaryKeys(input.getPrimaryKeys()); result.setFields( input.getFields().stream() - .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) .collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); + result.setAspectVersion(version); if (input.hasForeignKeys()) { result.setForeignKeys( input.getForeignKeys().stream() - .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .map( + foreignKeyConstraint -> + ForeignKeyConstraintMapper.map(context, foreignKeyConstraint)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 727e8629f74b2e..817c7c983ecc5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import 
static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Deprecation; @@ -10,6 +11,8 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DatasetEditableProperties; @@ -38,6 +41,7 @@ import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -50,12 +54,14 @@ public class VersionedDatasetMapper implements ModelMapper - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + dataset.setDeprecation( + DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); mappingHelper.mapToResult( SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setSchema( - SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + SchemaMapper.map( + context, new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); mappingHelper.mapToResult( EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); @@ -85,35 +93,42 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - 
dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setEditableSchemaMetadata( - EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> dataset.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, CONTAINER_ASPECT_NAME, VersionedDatasetMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, VersionedDatasetMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), VersionedDataset.class); + } else { + return mappingHelper.getResult(); + } } private 
SystemMetadata getSystemMetadata(EnvelopedAspectMap aspectMap, String aspectName) { @@ -174,14 +189,20 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags( - @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } - private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dataset.setContainer( @@ -191,9 +212,12 @@ private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap d .build()); } - private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java new file mode 100644 index 00000000000000..b2e3b2c7447d81 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.DataTypeInfo; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DataTypeEntityMapper implements ModelMapper { + + public static final DataTypeEntityMapper INSTANCE = new DataTypeEntityMapper(); + + public static DataTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public DataTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataTypeEntity result = new DataTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new 
MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_TYPE_INFO_ASPECT_NAME, this::mapDataTypeInfo); + + // Set the standard Type ENUM for the data type. + if (result.getInfo() != null) { + result.getInfo().setType(DataTypeUrnMapper.getType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapDataTypeInfo(@Nonnull DataTypeEntity dataType, @Nonnull DataMap dataMap) { + com.linkedin.datatype.DataTypeInfo gmsInfo = new com.linkedin.datatype.DataTypeInfo(dataMap); + DataTypeInfo info = new DataTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + dataType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java new file mode 100644 index 00000000000000..52fd21d5583218 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; 
+import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class DataTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATA_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.DATA_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + DATA_TYPE_ENTITY_NAME, + new HashSet<>(dataTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataTypeEntityMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load data type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java new file mode 100644 index 00000000000000..ec71cd63a70d5e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.StdDataType; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataTypeUrnMapper { + + static final Map DATA_TYPE_ENUM_TO_URN = + ImmutableMap.builder() + .put(StdDataType.STRING, "urn:li:dataType:datahub.string") + .put(StdDataType.NUMBER, "urn:li:dataType:datahub.number") + .put(StdDataType.URN, "urn:li:dataType:datahub.urn") + .put(StdDataType.RICH_TEXT, "urn:li:dataType:datahub.rich_text") + .put(StdDataType.DATE, "urn:li:dataType:datahub.date") + .build(); + + private static final Map URN_TO_DATA_TYPE_ENUM = + DATA_TYPE_ENUM_TO_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private DataTypeUrnMapper() {} + + public static StdDataType getType(String dataTypeUrn) { + if (!URN_TO_DATA_TYPE_ENUM.containsKey(dataTypeUrn)) { + return StdDataType.OTHER; + } + return URN_TO_DATA_TYPE_ENUM.get(dataTypeUrn); + } + + @Nonnull + public static String getUrn(StdDataType dataType) { + if (!DATA_TYPE_ENUM_TO_URN.containsKey(dataType)) { + throw new IllegalArgumentException("Unknown data type: " + dataType); + } + return DATA_TYPE_ENUM_TO_URN.get(dataType); + } +} 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index 51ef254f52225f..37b2018a2d4506 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainAssociation; import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -15,13 +19,19 @@ public class DomainAssociationMapper { public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - return INSTANCE.apply(domains, entityUrn); + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + return INSTANCE.apply(context, domains, entityUrn); } public DomainAssociation apply( - @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0 + && (context == null + || canView(context.getOperationContext(), domains.getDomains().get(0)))) { DomainAssociation association = new DomainAssociation(); association.setDomain( Domain.builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index 7ff1f70311b22e..7d05e0862a96da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -1,22 +1,33 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.Forms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; 
+import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.domain.DomainProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nullable; public class DomainMapper { - public static Domain map(final EntityResponse entityResponse) { + public static Domain map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Domain result = new Domain(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -42,7 +53,8 @@ public static Domain map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { result.setOwnership( - OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedInstitutionalMemory = @@ -50,10 +62,29 @@ public static Domain map(final EntityResponse entityResponse) { if (envelopedInstitutionalMemory != null) { result.setInstitutionalMemory( InstitutionalMemoryMapper.map( - new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + context, + new 
InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); + } + + final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); + if (envelopedStructuredProps != null) { + result.setStructuredProperties( + StructuredPropertiesMapper.map( + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); } - return result; + final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); + if (envelopedForms != null) { + result.setForms( + FormsMapper.map(new Forms(envelopedForms.getValue().data()), entityUrn.toString())); + } + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, Domain.class); + } else { + return result; + } } private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 06d5df9354380d..a5d4b0176bde1c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -38,7 +38,9 @@ public class DomainType Constants.DOMAIN_KEY_ASPECT_NAME, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STRUCTURED_PROPERTIES_ASPECT_NAME, + Constants.FORMS_ASPECT_NAME); private final EntityClient _entityClient; public DomainType(final EntityClient entityClient) { @@ -73,7 +75,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : domainUrns) { 
gmsResults.add(entities.getOrDefault(urn, null)); } @@ -83,7 +85,7 @@ public List> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(DomainMapper.map(gmsResult)) + .data(DomainMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -113,8 +115,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), Constants.DOMAIN_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } private Urn getUrn(final String urnStr) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java new file mode 100644 index 00000000000000..8afdd3f60e2205 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java @@ -0,0 +1,58 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityTypeInfo; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class EntityTypeEntityMapper implements ModelMapper { + + public static final EntityTypeEntityMapper 
INSTANCE = new EntityTypeEntityMapper(); + + public static EntityTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public EntityTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final EntityTypeEntity result = new EntityTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.ENTITY_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ENTITY_TYPE_INFO_ASPECT_NAME, this::mapEntityTypeInfo); + + // Set the standard Type ENUM for the entity type. + if (result.getInfo() != null) { + result + .getInfo() + .setType(EntityTypeUrnMapper.getEntityType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapEntityTypeInfo(@Nonnull EntityTypeEntity entityType, @Nonnull DataMap dataMap) { + com.linkedin.entitytype.EntityTypeInfo gmsInfo = + new com.linkedin.entitytype.EntityTypeInfo(dataMap); + EntityTypeInfo info = new EntityTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + entityType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java similarity index 86% rename from datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java rename to datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java index aba781f9e1dc70..e36d4e17f564da 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.graphql.resolvers; +package com.linkedin.datahub.graphql.types.entitytype; import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.graphql.generated.EntityType; @@ -17,7 +17,6 @@ public class EntityTypeMapper { ImmutableMap.builder() .put(EntityType.DATASET, "dataset") .put(EntityType.ROLE, "role") - .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) .put(EntityType.CORP_USER, "corpuser") .put(EntityType.CORP_GROUP, "corpGroup") .put(EntityType.DATA_PLATFORM, "dataPlatform") @@ -39,8 +38,13 @@ public class EntityTypeMapper { .put(EntityType.NOTEBOOK, "notebook") .put(EntityType.DATA_PLATFORM_INSTANCE, "dataPlatformInstance") .put(EntityType.TEST, "test") + .put(EntityType.ER_MODEL_RELATIONSHIP, Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME) .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) + .put(EntityType.SCHEMA_FIELD, "schemaField") + .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) + .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) + .put(EntityType.RESTRICTED, Constants.RESTRICTED_ENTITY_NAME) .build(); private static final Map ENTITY_NAME_TO_TYPE = @@ -52,7 +56,7 @@ private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); if (!ENTITY_NAME_TO_TYPE.containsKey(lowercaseName)) { - throw new IllegalArgumentException("Unknown entity name: " + name); + return EntityType.OTHER; } return ENTITY_NAME_TO_TYPE.get(lowercaseName); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java new file mode 100644 index 00000000000000..b8f7816df97e7b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class EntityTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(ENTITY_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.ENTITY_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return EntityTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List entityTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + ENTITY_TYPE_ENTITY_NAME, + new 
HashSet<>(entityTypeUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(EntityTypeEntityMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load entity type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java new file mode 100644 index 00000000000000..9e9bf86e5fe7fe --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +/** + * In this class we statically map "well-supported" entity types into a more usable Enum class + * served by our GraphQL API. + * + *

When we add new entity types / entity urns, we MAY NEED to update this. + * + *

Note that we currently do not support mapping entities that fall outside of this set. If you + * try to map an entity type without a corresponding enum symbol, the mapping WILL FAIL. + */ +public class EntityTypeUrnMapper { + + static final Map ENTITY_NAME_TO_ENTITY_TYPE_URN = + ImmutableMap.builder() + .put(Constants.DATASET_ENTITY_NAME, "urn:li:entityType:datahub.dataset") + .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") + .put(Constants.CORP_USER_ENTITY_NAME, "urn:li:entityType:datahub.corpuser") + .put(Constants.CORP_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.corpGroup") + .put(Constants.DATA_PLATFORM_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatform") + .put(Constants.DASHBOARD_ENTITY_NAME, "urn:li:entityType:datahub.dashboard") + .put(Constants.CHART_ENTITY_NAME, "urn:li:entityType:datahub.chart") + .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") + .put(Constants.DATA_FLOW_ENTITY_NAME, "urn:li:entityType:datahub.dataFlow") + .put(Constants.DATA_JOB_ENTITY_NAME, "urn:li:entityType:datahub.dataJob") + .put(Constants.GLOSSARY_TERM_ENTITY_NAME, "urn:li:entityType:datahub.glossaryTerm") + .put(Constants.GLOSSARY_NODE_ENTITY_NAME, "urn:li:entityType:datahub.glossaryNode") + .put(Constants.ML_MODEL_ENTITY_NAME, "urn:li:entityType:datahub.mlModel") + .put(Constants.ML_MODEL_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.mlModelGroup") + .put(Constants.ML_FEATURE_TABLE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeatureTable") + .put(Constants.ML_FEATURE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeature") + .put(Constants.ML_PRIMARY_KEY_ENTITY_NAME, "urn:li:entityType:datahub.mlPrimaryKey") + .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") + .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") + .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") + .put( + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataPlatformInstance") + 
.put(Constants.TEST_ENTITY_NAME, "urn:li:entityType:datahub.test") + .put(Constants.DATAHUB_VIEW_ENTITY_NAME, "urn:li:entityType:datahub.dataHubView") + .put(Constants.DATA_PRODUCT_ENTITY_NAME, "urn:li:entityType:datahub.dataProduct") + .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") + .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") + .build(); + + private static final Map ENTITY_TYPE_URN_TO_NAME = + ENTITY_NAME_TO_ENTITY_TYPE_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private EntityTypeUrnMapper() {} + + public static String getName(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + return ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + } + + /* + * Takes in a entityTypeUrn and returns a GraphQL EntityType by first mapping + * the urn to the entity name, and then mapping the entity name to EntityType. 
+ */ + public static EntityType getEntityType(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + final String entityName = ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + return EntityTypeMapper.getType(entityName); + } + + @Nonnull + public static String getEntityTypeUrn(String name) { + if (!ENTITY_NAME_TO_ENTITY_TYPE_URN.containsKey(name)) { + throw new IllegalArgumentException("Unknown entity name: " + name); + } + return ENTITY_NAME_TO_ENTITY_TYPE_URN.get(name); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java new file mode 100644 index 00000000000000..53d76de1a1fd24 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java @@ -0,0 +1,114 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipPropertiesInput; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationMapper; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationshipUpdateInputMapper; +import 
com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.service.ERModelRelationshipService;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.r2.RemoteInvocationException;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;

/**
 * Resolver for the createERModelRelationship mutation: derives a deterministic GUID from the
 * (source, destination, name) triple, authorizes the caller against both datasets, ingests the
 * mapped proposals, and returns the freshly-created entity.
 */
@Slf4j
@RequiredArgsConstructor
public class CreateERModelRelationshipResolver
    implements DataFetcher<CompletableFuture<ERModelRelationship>> {

  private final EntityClient _entityClient;
  private final ERModelRelationshipService _erModelRelationshipService;

  @Override
  public CompletableFuture<ERModelRelationship> get(DataFetchingEnvironment environment)
      throws Exception {
    final ERModelRelationshipUpdateInput input =
        bindArgument(environment.getArgument("input"), ERModelRelationshipUpdateInput.class);

    final ERModelRelationshipPropertiesInput erModelRelationshipPropertiesInput =
        input.getProperties();
    String ermodelrelationName = erModelRelationshipPropertiesInput.getName();
    String source = erModelRelationshipPropertiesInput.getSource();
    String destination = erModelRelationshipPropertiesInput.getDestination();

    // Order the two dataset urns deterministically so (A, B) and (B, A) produce the same GUID.
    String lowDataset = source;
    String highDataset = destination;
    if (source.compareTo(destination) > 0) {
      lowDataset = destination;
      highDataset = source;
    }
    // The following sequence mimics datahub.emitter.mce_builder.datahub_guid
    String ermodelrelationKey =
        "{\"Source\":\""
            + lowDataset
            + "\",\"Destination\":\""
            + highDataset
            + "\",\"ERModelRelationName\":\""
            + ermodelrelationName
            + "\"}";

    // DigestUtils.md5Hex(String) hashes the UTF-8 bytes directly; the previous
    // getBytes(UTF_8) / new String(..., UTF_8) round-trip was a no-op and is removed.
    String ermodelrelationGuid = DigestUtils.md5Hex(ermodelrelationKey);
    log.info(
        "ermodelrelationkey {}, ermodelrelationGuid {}", ermodelrelationKey, ermodelrelationGuid);

    ERModelRelationshipUrn inputUrn = new ERModelRelationshipUrn(ermodelrelationGuid);
    QueryContext context = environment.getContext();
    final Authentication authentication = context.getAuthentication();
    final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn());
    if (!ERModelRelationshipType.canCreateERModelRelation(
        context,
        Urn.createFromString(input.getProperties().getSource()),
        Urn.createFromString(input.getProperties().getDestination()))) {
      throw new AuthorizationException(
          "Unauthorized to create erModelRelationship. Please contact your DataHub administrator.");
    }
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            log.debug("Create ERModelRelation input: {}", input);
            final Collection<MetadataChangeProposal> proposals =
                ERModelRelationshipUpdateInputMapper.map(context, input, actor);
            proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn));
            try {
              _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
            } catch (RemoteInvocationException e) {
              throw new RuntimeException("Failed to create erModelRelationship entity", e);
            }
            // inputUrn is already a Urn; no need to round-trip through createFromString.
            return ERModelRelationMapper.map(
                context,
                _erModelRelationshipService.getERModelRelationshipResponse(
                    inputUrn, authentication));
          } catch (Exception e) {
            log.error(
                "Failed to create ERModelRelation to resource with input {}, {}",
                input,
                e.getMessage());
            throw new RuntimeException(
                String.format(
                    "Failed to create erModelRelationship to resource with input %s", input),
                e);
          }
        });
  }
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java
new file mode 100644
index 00000000000000..12294b51654a6a
---
/dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java @@ -0,0 +1,249 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.AutoCompleteResults; +import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationMapper; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; +import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; +import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; 
+import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.query.AutoCompleteResult; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ERModelRelationshipType + implements com.linkedin.datahub.graphql.types.EntityType, + BrowsableEntityType, + SearchableEntityType { + + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + ER_MODEL_RELATIONSHIP_KEY_ASPECT_NAME, + ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, + EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME); + + private static final Set FACET_FIELDS = ImmutableSet.of("name"); + private static final String ENTITY_NAME = "erModelRelationship"; + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + public ERModelRelationshipType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = + featureFlags; // TODO: check if ERModelRelation Feture is Enabled and throw error when + // called + } + + @Override + public Class objectClass() { + return ERModelRelationship.class; + } + + @Override + public EntityType type() { + return EntityType.ER_MODEL_RELATIONSHIP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override 
+ public List> batchLoad( + @Nonnull final List urns, @Nonnull final QueryContext context) throws Exception { + final List ermodelrelationUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + ER_MODEL_RELATIONSHIP_ENTITY_NAME, + new HashSet<>(ermodelrelationUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : ermodelrelationUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(ERModelRelationMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to load erModelRelationship entity", e); + } + } + + @Nonnull + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "erModelRelationship", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Nonnull + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(UrnUtils.getUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(context, result); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ENTITY_NAME, + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + public static boolean canUpdateERModelRelation( + @Nonnull QueryContext context, + ERModelRelationshipUrn resourceUrn, + ERModelRelationshipUpdateInput updateInput) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getEditableProperties() != null) { + 
specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(editPrivilegesGroup, specificPrivilegeGroup)); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } + + public static boolean canCreateERModelRelation( + @Nonnull QueryContext context, Urn sourceUrn, Urn destinationUrn) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + final ConjunctivePrivilegeGroup createPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_ER_MODEL_RELATIONSHIP_PRIVILEGE.getType())); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(editPrivilegesGroup, createPrivilegesGroup)); + boolean sourcePrivilege = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + sourceUrn.getEntityType(), + sourceUrn.toString(), + orPrivilegeGroups); + boolean destinationPrivilege = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + destinationUrn.getEntityType(), + destinationUrn.toString(), + orPrivilegeGroups); + return sourcePrivilege && destinationPrivilege; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java new file mode 100644 index 00000000000000..3e3ea0216c7346 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java @@ -0,0 +1,66 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationshipUpdateInputMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collection; +import java.util.concurrent.CompletableFuture; +import 
lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

/**
 * Resolver for the updateERModelRelationship mutation: authorizes the caller against the target
 * relationship urn, maps the input to proposals, and ingests them. Returns true on success.
 */
@Slf4j
@RequiredArgsConstructor
public class UpdateERModelRelationshipResolver
    implements DataFetcher<CompletableFuture<Boolean>> {

  private final EntityClient _entityClient;

  @Override
  public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
    final ERModelRelationshipUpdateInput input =
        bindArgument(environment.getArgument("input"), ERModelRelationshipUpdateInput.class);
    final String urn = bindArgument(environment.getArgument("urn"), String.class);
    ERModelRelationshipUrn inputUrn = ERModelRelationshipUrn.createFromString(urn);
    QueryContext context = environment.getContext();
    final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn());
    if (!ERModelRelationshipType.canUpdateERModelRelation(context, inputUrn, input)) {
      throw new AuthorizationException(
          "Unauthorized to perform this action. Please contact your DataHub administrator.");
    }
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            // Was "Create ERModelRelation input" — copy/paste from the create resolver.
            log.debug("Update ERModelRelationship input: {}", input);
            final Collection<MetadataChangeProposal> proposals =
                ERModelRelationshipUpdateInputMapper.map(context, input, actor);
            proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn));

            try {
              _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
            } catch (RemoteInvocationException e) {
              // String.format with no format arguments was pointless; use the literal and chain
              // the cause.
              throw new RuntimeException("Failed to update erModelRelationship entity", e);
            }
            return true;
          } catch (Exception e) {
            log.error(
                "Failed to update erModelRelationship to resource with input {}, {}",
                input,
                e.getMessage());
            throw new RuntimeException(
                String.format(
                    "Failed to update erModelRelationship to resource with input %s", input),
                e);
          }
        });
  }
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java
new file mode 100644
index 00000000000000..50a7b7f895fe6b
--- /dev/null
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java
@@ -0,0 +1,196 @@
package com.linkedin.datahub.graphql.types.ermodelrelationship.mappers;

import static com.linkedin.metadata.Constants.*;

import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTerms;
import com.linkedin.common.InstitutionalMemory;
import com.linkedin.common.Ownership;
import com.linkedin.common.Status;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.DataMap;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.Dataset;
import com.linkedin.datahub.graphql.generated.ERModelRelationship;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.RelationshipFieldMapping;
import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper;
import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.ermodelrelation.ERModelRelationshipProperties;
import com.linkedin.ermodelrelation.EditableERModelRelationshipProperties;
import com.linkedin.metadata.key.ERModelRelationshipKey;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

/**
 * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
 *
 * <p>To be replaced by auto-generated mappers implementations
 */
public class ERModelRelationMapper
    implements ModelMapper<EntityResponse, ERModelRelationship> {

  public static final ERModelRelationMapper INSTANCE = new ERModelRelationMapper();

  /** Convenience entry point delegating to the shared singleton instance. */
  public static ERModelRelationship map(
      @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) {
    return INSTANCE.apply(context, entityResponse);
  }

  /**
   * Builds the GraphQL entity from the enveloped aspect map; each aspect is mapped only when
   * present in the response.
   */
  @Override
  public ERModelRelationship apply(
      @Nullable final QueryContext context, final EntityResponse entityResponse) {
    final ERModelRelationship result = new ERModelRelationship();
    final Urn entityUrn = entityResponse.getUrn();

    result.setUrn(entityUrn.toString());
    result.setType(EntityType.ER_MODEL_RELATIONSHIP);

    final EnvelopedAspectMap aspectMap = entityResponse.getAspects();
    MappingHelper<ERModelRelationship> mappingHelper = new MappingHelper<>(aspectMap, result);
    mappingHelper.mapToResult(ER_MODEL_RELATIONSHIP_KEY_ASPECT_NAME, this::mapERModelRelationKey);
    mappingHelper.mapToResult(
        context, ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapProperties);
    if (aspectMap != null
        && aspectMap.containsKey(EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapEditableProperties);
    }
    if (aspectMap != null && aspectMap.containsKey(INSTITUTIONAL_MEMORY_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          INSTITUTIONAL_MEMORY_ASPECT_NAME,
          (ermodelrelation, dataMap) ->
              ermodelrelation.setInstitutionalMemory(
                  InstitutionalMemoryMapper.map(
                      context, new InstitutionalMemory(dataMap), entityUrn)));
    }
    if (aspectMap != null && aspectMap.containsKey(OWNERSHIP_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          OWNERSHIP_ASPECT_NAME,
          (ermodelrelation, dataMap) ->
              ermodelrelation.setOwnership(
                  OwnershipMapper.map(context, new Ownership(dataMap), entityUrn)));
    }
    if (aspectMap != null && aspectMap.containsKey(STATUS_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          STATUS_ASPECT_NAME,
          (ermodelrelation, dataMap) ->
              ermodelrelation.setStatus(StatusMapper.map(context, new Status(dataMap))));
    }
    if (aspectMap != null && aspectMap.containsKey(GLOBAL_TAGS_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          GLOBAL_TAGS_ASPECT_NAME,
          (ermodelrelation, dataMap) ->
              this.mapGlobalTags(context, ermodelrelation, dataMap, entityUrn));
    }
    if (aspectMap != null && aspectMap.containsKey(GLOSSARY_TERMS_ASPECT_NAME)) {
      mappingHelper.mapToResult(
          GLOSSARY_TERMS_ASPECT_NAME,
          (ermodelrelation, dataMap) ->
              ermodelrelation.setGlossaryTerms(
                  GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn)));
    }
    return mappingHelper.getResult();
  }

  /** Copies editable name/description onto the GraphQL entity. */
  private void mapEditableProperties(
      @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap dataMap) {
    final EditableERModelRelationshipProperties editableERModelRelationProperties =
        new EditableERModelRelationshipProperties(dataMap);
    ermodelrelation.setEditableProperties(
        com.linkedin.datahub.graphql.generated.ERModelRelationshipEditableProperties.builder()
            .setDescription(editableERModelRelationProperties.getDescription())
            .setName(editableERModelRelationProperties.getName())
            .build());
  }

  /** Extracts the key aspect's id. */
  private void mapERModelRelationKey(
      @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap datamap) {
    ERModelRelationshipKey ermodelrelationKey = new ERModelRelationshipKey(datamap);
    ermodelrelation.setId(ermodelrelationKey.getId());
  }

  /** Maps the core properties aspect, including source/destination stubs and field mappings. */
  private void mapProperties(
      @Nullable final QueryContext context,
      @Nonnull ERModelRelationship ermodelrelation,
      @Nonnull DataMap dataMap) {
    final ERModelRelationshipProperties ermodelrelationProperties =
        new ERModelRelationshipProperties(dataMap);
    ermodelrelation.setProperties(
        com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties.builder()
            .setName(ermodelrelationProperties.getName())
            .setSource(createPartialDataset(ermodelrelationProperties.getSource()))
            .setDestination(createPartialDataset(ermodelrelationProperties.getDestination()))
            .setCreatedTime(
                ermodelrelationProperties.hasCreated()
                        && ermodelrelationProperties.getCreated().getTime() > 0
                    ? ermodelrelationProperties.getCreated().getTime()
                    : 0)
            .setRelationshipFieldMappings(
                ermodelrelationProperties.hasRelationshipFieldMappings()
                    ? this.mapERModelRelationFieldMappings(ermodelrelationProperties)
                    : null)
            .build());

    if (ermodelrelationProperties.hasCreated()
        && Objects.requireNonNull(ermodelrelationProperties.getCreated()).hasActor()) {
      ermodelrelation
          .getProperties()
          .setCreatedActor(
              UrnToEntityMapper.map(context, ermodelrelationProperties.getCreated().getActor()));
    }
  }

  /** Builds a Dataset stub carrying only the urn; hydrated later by the Dataset loader. */
  private Dataset createPartialDataset(@Nonnull Urn datasetUrn) {

    Dataset partialDataset = new Dataset();

    partialDataset.setUrn(datasetUrn.toString());

    return partialDataset;
  }

  private List<RelationshipFieldMapping> mapERModelRelationFieldMappings(
      ERModelRelationshipProperties ermodelrelationProperties) {
    final List<RelationshipFieldMapping> relationshipFieldMappingList = new ArrayList<>();

    ermodelrelationProperties
        .getRelationshipFieldMappings()
        .forEach(
            relationshipFieldMapping ->
                relationshipFieldMappingList.add(
                    this.mapRelationshipFieldMappings(relationshipFieldMapping)));

    return relationshipFieldMappingList;
  }

  private com.linkedin.datahub.graphql.generated.RelationshipFieldMapping
      mapRelationshipFieldMappings(
          com.linkedin.ermodelrelation.RelationshipFieldMapping relationFieldMapping) {
    return com.linkedin.datahub.graphql.generated.RelationshipFieldMapping.builder()
        .setDestinationField(relationFieldMapping.getDestinationField())
        .setSourceField(relationFieldMapping.getSourceField())
        .build();
  }

  private void mapGlobalTags(
      @Nullable final QueryContext context,
      @Nonnull ERModelRelationship ermodelrelation,
      @Nonnull DataMap dataMap,
      @Nonnull final Urn entityUrn) {
    com.linkedin.datahub.graphql.generated.GlobalTags globalTags =
        GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn);
    ermodelrelation.setTags(globalTags);
  }
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java
new file mode 100644
index 00000000000000..d18a3e741c4336
--- /dev/null
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java
@@ -0,0 +1,194 @@
package com.linkedin.datahub.graphql.types.ermodelrelationship.mappers;

import static com.linkedin.metadata.Constants.*;

import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.DatasetUrn;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.SetMode;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipEditablePropertiesUpdate;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipPropertiesInput;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput;
import com.linkedin.datahub.graphql.generated.RelationshipFieldMappingInput;
import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper;
import com.linkedin.datahub.graphql.types.mappers.InputModelMapper;
import com.linkedin.ermodelrelation.ERModelRelationshipCardinality;
import com.linkedin.ermodelrelation.ERModelRelationshipProperties;
import com.linkedin.ermodelrelation.EditableERModelRelationshipProperties;
import com.linkedin.ermodelrelation.RelationshipFieldMappingArray;
import com.linkedin.mxe.MetadataChangeProposal;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import
java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ERModelRelationshipUpdateInputMapper + implements InputModelMapper< + ERModelRelationshipUpdateInput, Collection, Urn> { + public static final ERModelRelationshipUpdateInputMapper INSTANCE = + new ERModelRelationshipUpdateInputMapper(); + + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final ERModelRelationshipUpdateInput ermodelrelationUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, ermodelrelationUpdateInput, actor); + } + + @Override + public Collection apply( + @Nullable final QueryContext context, ERModelRelationshipUpdateInput input, Urn actor) { + final Collection proposals = new ArrayList<>(8); + final UpdateMappingHelper updateMappingHelper = + new UpdateMappingHelper(ER_MODEL_RELATIONSHIP_ENTITY_NAME); + final long currentTime = System.currentTimeMillis(); + final AuditStamp auditstamp = new AuditStamp(); + auditstamp.setActor(actor, SetMode.IGNORE_NULL); + auditstamp.setTime(currentTime); + if (input.getProperties() != null) { + com.linkedin.ermodelrelation.ERModelRelationshipProperties ermodelrelationProperties = + createERModelRelationProperties(input.getProperties(), auditstamp); + proposals.add( + updateMappingHelper.aspectToProposal( + ermodelrelationProperties, ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)); + } + if (input.getEditableProperties() != null) { + final EditableERModelRelationshipProperties editableERModelRelationProperties = + ermodelrelationshipEditablePropsSettings(input.getEditableProperties()); + proposals.add( + updateMappingHelper.aspectToProposal( + editableERModelRelationProperties, + EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)); + } + return proposals; + } + + private ERModelRelationshipProperties createERModelRelationProperties( + ERModelRelationshipPropertiesInput inputProperties, AuditStamp auditstamp) { + 
com.linkedin.ermodelrelation.ERModelRelationshipProperties ermodelrelationProperties = + new com.linkedin.ermodelrelation.ERModelRelationshipProperties(); + if (inputProperties.getName() != null) { + ermodelrelationProperties.setName(inputProperties.getName()); + } + try { + if (inputProperties.getSource() != null) { + ermodelrelationProperties.setSource( + DatasetUrn.createFromString(inputProperties.getSource())); + } + if (inputProperties.getDestination() != null) { + ermodelrelationProperties.setDestination( + DatasetUrn.createFromString(inputProperties.getDestination())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + + if (inputProperties.getRelationshipFieldmappings() != null) { + if (inputProperties.getRelationshipFieldmappings().size() > 0) { + com.linkedin.ermodelrelation.RelationshipFieldMappingArray relationshipFieldMappingsArray = + ermodelrelationFieldMappingSettings(inputProperties.getRelationshipFieldmappings()); + ermodelrelationProperties.setCardinality( + ermodelrelationCardinalitySettings(inputProperties.getRelationshipFieldmappings())); + ermodelrelationProperties.setRelationshipFieldMappings(relationshipFieldMappingsArray); + } + + if (inputProperties.getCreated() != null && inputProperties.getCreated()) { + ermodelrelationProperties.setCreated(auditstamp); + } else { + if (inputProperties.getCreatedBy() != null && inputProperties.getCreatedAt() != 0) { + final AuditStamp auditstampEdit = new AuditStamp(); + try { + auditstampEdit.setActor(Urn.createFromString(inputProperties.getCreatedBy())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + auditstampEdit.setTime(inputProperties.getCreatedAt()); + ermodelrelationProperties.setCreated(auditstampEdit); + } + } + ermodelrelationProperties.setLastModified(auditstamp); + } + return ermodelrelationProperties; + } + + private com.linkedin.ermodelrelation.ERModelRelationshipCardinality + ermodelrelationCardinalitySettings( + List 
ermodelrelationFieldMapping) { + + Set sourceFields = new HashSet<>(); + Set destFields = new HashSet<>(); + AtomicInteger sourceCount = new AtomicInteger(); + AtomicInteger destCount = new AtomicInteger(); + + ermodelrelationFieldMapping.forEach( + relationshipFieldMappingInput -> { + sourceFields.add(relationshipFieldMappingInput.getSourceField()); + sourceCount.getAndIncrement(); + destFields.add(relationshipFieldMappingInput.getDestinationField()); + destCount.getAndIncrement(); + }); + + if (sourceFields.size() == sourceCount.get()) { + if (destFields.size() == destCount.get()) { + return ERModelRelationshipCardinality.ONE_ONE; + } else { + return ERModelRelationshipCardinality.N_ONE; + } + } else { + if (destFields.size() == destCount.get()) { + return ERModelRelationshipCardinality.ONE_N; + } else { + return ERModelRelationshipCardinality.N_N; + } + } + } + + private com.linkedin.ermodelrelation.RelationshipFieldMappingArray + ermodelrelationFieldMappingSettings( + List ermodelrelationFieldMapping) { + + List relationshipFieldMappingList = + this.mapRelationshipFieldMapping(ermodelrelationFieldMapping); + + return new RelationshipFieldMappingArray(relationshipFieldMappingList); + } + + private List mapRelationshipFieldMapping( + List ermodelrelationFieldMapping) { + + List relationshipFieldMappingList = + new ArrayList<>(); + + ermodelrelationFieldMapping.forEach( + relationshipFieldMappingInput -> { + com.linkedin.ermodelrelation.RelationshipFieldMapping relationshipFieldMapping = + new com.linkedin.ermodelrelation.RelationshipFieldMapping(); + relationshipFieldMapping.setSourceField(relationshipFieldMappingInput.getSourceField()); + relationshipFieldMapping.setDestinationField( + relationshipFieldMappingInput.getDestinationField()); + relationshipFieldMappingList.add(relationshipFieldMapping); + }); + + return relationshipFieldMappingList; + } + + private static EditableERModelRelationshipProperties ermodelrelationshipEditablePropsSettings( + 
ERModelRelationshipEditablePropertiesUpdate editPropsInput) { + final EditableERModelRelationshipProperties editableERModelRelationProperties = + new EditableERModelRelationshipProperties(); + if (editPropsInput.getName() != null && editPropsInput.getName().trim().length() > 0) { + editableERModelRelationProperties.setName(editPropsInput.getName()); + } + if (editPropsInput.getDescription() != null + && editPropsInput.getDescription().trim().length() > 0) { + editableERModelRelationProperties.setDescription(editPropsInput.getDescription()); + } + return editableERModelRelationProperties; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java new file mode 100644 index 00000000000000..4f2ae014995de0 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java @@ -0,0 +1,132 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import com.linkedin.datahub.graphql.generated.FormInfo; +import com.linkedin.datahub.graphql.generated.FormPrompt; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.FormType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import 
com.linkedin.datahub.graphql.generated.StructuredPropertyParams; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class FormMapper implements ModelMapper { + + public static final FormMapper INSTANCE = new FormMapper(); + + public static Form map(@Nullable final QueryContext context, @Nonnull final EntityResponse form) { + return INSTANCE.apply(context, form); + } + + public Form apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + Form result = new Form(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityUrn.toString()); + result.setType(EntityType.FORM); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper

mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(FORM_INFO_ASPECT_NAME, this::mapFormInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (form, dataMap) -> + form.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + + return mappingHelper.getResult(); + } + + private void mapFormInfo(@Nonnull Form form, @Nonnull DataMap dataMap) { + com.linkedin.form.FormInfo gmsFormInfo = new com.linkedin.form.FormInfo(dataMap); + FormInfo formInfo = new FormInfo(); + formInfo.setName(gmsFormInfo.getName()); + formInfo.setType(FormType.valueOf(gmsFormInfo.getType().toString())); + if (gmsFormInfo.hasDescription()) { + formInfo.setDescription(gmsFormInfo.getDescription()); + } + formInfo.setPrompts(this.mapFormPrompts(gmsFormInfo, form.getUrn())); + formInfo.setActors(mapFormActors(gmsFormInfo.getActors())); + form.setInfo(formInfo); + } + + private List mapFormPrompts( + @Nonnull com.linkedin.form.FormInfo gmsFormInfo, @Nonnull String formUrn) { + List formPrompts = new ArrayList<>(); + if (gmsFormInfo.hasPrompts()) { + gmsFormInfo + .getPrompts() + .forEach(FormPrompt -> formPrompts.add(mapFormPrompt(FormPrompt, formUrn))); + } + return formPrompts; + } + + private FormPrompt mapFormPrompt( + @Nonnull com.linkedin.form.FormPrompt gmsFormPrompt, @Nonnull String formUrn) { + final FormPrompt formPrompt = new FormPrompt(); + formPrompt.setId(gmsFormPrompt.getId()); + formPrompt.setTitle(gmsFormPrompt.getTitle()); + formPrompt.setType(FormPromptType.valueOf(gmsFormPrompt.getType().toString())); + formPrompt.setRequired(gmsFormPrompt.isRequired()); + formPrompt.setFormUrn(formUrn); + if (gmsFormPrompt.hasDescription()) { + formPrompt.setDescription(gmsFormPrompt.getDescription()); + } + + if (gmsFormPrompt.hasStructuredPropertyParams()) { + final StructuredPropertyParams params = new StructuredPropertyParams(); + final Urn structuredPropUrn = gmsFormPrompt.getStructuredPropertyParams().getUrn(); + final 
StructuredPropertyEntity structuredProp = new StructuredPropertyEntity(); + structuredProp.setUrn(structuredPropUrn.toString()); + structuredProp.setType(EntityType.STRUCTURED_PROPERTY); + params.setStructuredProperty(structuredProp); + formPrompt.setStructuredPropertyParams(params); + } + + return formPrompt; + } + + private FormActorAssignment mapFormActors(com.linkedin.form.FormActorAssignment gmsFormActors) { + FormActorAssignment result = new FormActorAssignment(); + result.setOwners(gmsFormActors.isOwners()); + if (gmsFormActors.hasUsers()) { + result.setUsers( + gmsFormActors.getUsers().stream().map(this::mapUser).collect(Collectors.toList())); + } + if (gmsFormActors.hasGroups()) { + result.setGroups( + gmsFormActors.getGroups().stream().map(this::mapGroup).collect(Collectors.toList())); + } + return result; + } + + private CorpUser mapUser(Urn userUrn) { + CorpUser user = new CorpUser(); + user.setUrn(userUrn.toString()); + return user; + } + + private CorpGroup mapGroup(Urn groupUrn) { + CorpGroup group = new CorpGroup(); + group.setUrn(groupUrn.toString()); + return group; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java new file mode 100644 index 00000000000000..5edee2b1dcdeb8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import 
com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class FormType implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(FORM_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.FORM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Form.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List formUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + FORM_ENTITY_NAME, + new HashSet<>(formUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : formUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(FormMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Forms", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java new file mode 100644 index 00000000000000..43665b37b9ee87 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java @@ -0,0 +1,133 @@ +package com.linkedin.datahub.graphql.types.form; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FieldFormPromptAssociation; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptFieldAssociations; +import com.linkedin.datahub.graphql.generated.FormVerificationAssociation; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nonnull; + +public class FormsMapper { + + public static final FormsMapper INSTANCE = new FormsMapper(); + + public static com.linkedin.datahub.graphql.generated.Forms map( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + return INSTANCE.apply(forms, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Forms apply( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + final List incompleteForms = new ArrayList<>(); + 
forms + .getIncompleteForms() + .forEach( + formAssociation -> + incompleteForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List completeForms = new ArrayList<>(); + forms + .getCompletedForms() + .forEach( + formAssociation -> + completeForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List verifications = new ArrayList<>(); + forms + .getVerifications() + .forEach( + verificationAssociation -> + verifications.add(this.mapVerificationAssociation(verificationAssociation))); + + return new com.linkedin.datahub.graphql.generated.Forms( + incompleteForms, completeForms, verifications); + } + + private FormAssociation mapFormAssociation( + @Nonnull final com.linkedin.common.FormAssociation association, + @Nonnull final String entityUrn) { + FormAssociation result = new FormAssociation(); + result.setForm( + Form.builder().setType(EntityType.FORM).setUrn(association.getUrn().toString()).build()); + result.setAssociatedUrn(entityUrn); + result.setCompletedPrompts(this.mapPrompts(association.getCompletedPrompts())); + result.setIncompletePrompts(this.mapPrompts(association.getIncompletePrompts())); + return result; + } + + private FormVerificationAssociation mapVerificationAssociation( + @Nonnull final com.linkedin.common.FormVerificationAssociation verificationAssociation) { + FormVerificationAssociation result = new FormVerificationAssociation(); + result.setForm( + Form.builder() + .setType(EntityType.FORM) + .setUrn(verificationAssociation.getForm().toString()) + .build()); + if (verificationAssociation.hasLastModified()) { + result.setLastModified(createAuditStamp(verificationAssociation.getLastModified())); + } + return result; + } + + private List mapPrompts( + @Nonnull final FormPromptAssociationArray promptAssociations) { + List result = new ArrayList<>(); + promptAssociations.forEach( + promptAssociation -> { + FormPromptAssociation association = new FormPromptAssociation(); + 
association.setId(promptAssociation.getId()); + association.setLastModified(createAuditStamp(promptAssociation.getLastModified())); + if (promptAssociation.hasFieldAssociations()) { + association.setFieldAssociations( + mapFieldAssociations(promptAssociation.getFieldAssociations())); + } + result.add(association); + }); + return result; + } + + private List mapFieldPrompts( + @Nonnull final FieldFormPromptAssociationArray fieldPromptAssociations) { + List result = new ArrayList<>(); + fieldPromptAssociations.forEach( + fieldFormPromptAssociation -> { + FieldFormPromptAssociation association = new FieldFormPromptAssociation(); + association.setFieldPath(fieldFormPromptAssociation.getFieldPath()); + association.setLastModified( + createAuditStamp(fieldFormPromptAssociation.getLastModified())); + result.add(association); + }); + return result; + } + + private FormPromptFieldAssociations mapFieldAssociations( + com.linkedin.common.FormPromptFieldAssociations associationsObj) { + final FormPromptFieldAssociations fieldAssociations = new FormPromptFieldAssociations(); + if (associationsObj.hasCompletedFieldPrompts()) { + fieldAssociations.setCompletedFieldPrompts( + this.mapFieldPrompts(associationsObj.getCompletedFieldPrompts())); + } + if (associationsObj.hasIncompleteFieldPrompts()) { + fieldAssociations.setIncompleteFieldPrompts( + this.mapFieldPrompts(associationsObj.getIncompleteFieldPrompts())); + } + return fieldAssociations; + } + + private ResolvedAuditStamp createAuditStamp(AuditStamp auditStamp) { + final ResolvedAuditStamp resolvedAuditStamp = new ResolvedAuditStamp(); + final CorpUser emptyCreatedUser = new CorpUser(); + emptyCreatedUser.setUrn(auditStamp.getActor().toString()); + resolvedAuditStamp.setActor(emptyCreatedUser); + resolvedAuditStamp.setTime(auditStamp.getTime()); + return resolvedAuditStamp; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index 9a27a1fba853fe..91eb843030576d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -29,7 +31,11 @@ public class GlossaryNodeType static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); + GLOSSARY_NODE_KEY_ASPECT_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private final EntityClient _entityClient; @@ -66,7 +72,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } @@ -76,7 +82,7 @@ public List> batchLoad( gmsGlossaryNode == null ? 
null : DataFetcherResult.newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .data(GlossaryNodeMapper.map(context, gmsGlossaryNode)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index c40740238f61e1..9d697cd9220b73 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -58,7 +57,9 @@ public class GlossaryTermType STATUS_ASPECT_NAME, BROWSE_PATHS_ASPECT_NAME, DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME); + DEPRECATION_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private final EntityClient _entityClient; @@ -95,7 +96,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : glossaryTermUrns) { gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); } @@ -105,7 +106,7 @@ public List> batchLoad( gmsGlossaryTerm == null ? 
null : DataFetcherResult.newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .data(GlossaryTermMapper.map(context, gmsGlossaryTerm)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -124,14 +125,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "glossaryTerm", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -144,8 +144,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "glossaryTerm", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), "glossaryTerm", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -161,8 +161,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "glossaryTerm", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -171,6 +176,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon final StringArray result = _entityClient.getBrowsePaths( GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 901361eb0b2be6..4912d18614f415 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,32 +1,43 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; +import com.linkedin.common.Forms; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryNodeProperties; +import 
com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GlossaryNodeMapper implements ModelMapper { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); - public static GlossaryNode map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static GlossaryNode map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { + public GlossaryNode apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { GlossaryNode result = new GlossaryNode(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.GLOSSARY_NODE); @@ -36,23 +47,43 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( GLOSSARY_NODE_INFO_ASPECT_NAME, - (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + (glossaryNode, dataMap) -> + glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap, entityUrn))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, 
this::mapGlossaryNodeKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + glossaryNode.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryNode.class); + } else { + return mappingHelper.getResult(); + } } - private GlossaryNodeProperties mapGlossaryNodeProperties(@Nonnull DataMap dataMap) { + private GlossaryNodeProperties mapGlossaryNodeProperties( + @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { GlossaryNodeInfo glossaryNodeInfo = new GlossaryNodeInfo(dataMap); GlossaryNodeProperties result = new GlossaryNodeProperties(); result.setDescription(glossaryNodeInfo.getDefinition()); if (glossaryNodeInfo.hasName()) { result.setName(glossaryNodeInfo.getName()); } + if (glossaryNodeInfo.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryNodeInfo.getCustomProperties(), entityUrn)); + } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index a02f79535399f0..1274646f45ec49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,13 +1,17 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; @@ -15,14 +19,18 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.key.GlossaryTermKey; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -33,12 +41,14 @@ public class GlossaryTermMapper implements ModelMapper - glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + glossaryTerm.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + glossaryTerm.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); // If there's no name property, resort to the legacy name computation. 
if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { @@ -82,7 +103,11 @@ public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { if (result.getProperties() != null && result.getProperties().getName() == null) { result.getProperties().setName(legacyName); } - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryTerm.class); + } else { + return mappingHelper.getResult(); + } } private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { @@ -91,8 +116,11 @@ private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull Dat glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + private void mapDomains( + @Nullable QueryContext context, + @Nonnull GlossaryTerm glossaryTerm, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); + glossaryTerm.setDomain(DomainAssociationMapper.map(context, domains, glossaryTerm.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index 8494eace222448..705b924d208ffb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import 
com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTerms; @@ -19,18 +23,23 @@ public class GlossaryTermsMapper { public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); public static GlossaryTerms map( + @Nonnull final QueryContext context, @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(glossaryTerms, entityUrn); + return INSTANCE.apply(context, glossaryTerms, entityUrn); } public GlossaryTerms apply( + @Nonnull final QueryContext context, @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); result.setTerms( glossaryTerms.getTerms().stream() + .filter( + association -> + context == null || canView(context.getOperationContext(), association.getUrn())) .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) .collect(Collectors.toList())); return result; @@ -40,13 +49,24 @@ private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossa @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); resultGlossaryTerm.setUrn(input.getUrn().toString()); resultGlossaryTerm.setName( GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); result.setTerm(resultGlossaryTerm); - 
result.setAssociatedUrn(entityUrn.toString()); + + if (input.hasActor()) { + CorpUser actor = new CorpUser(); + actor.setUrn(input.getActor().toString()); + actor.setType(EntityType.CORP_USER); + result.setActor(actor); + } + if (entityUrn != null) { + result.setAssociatedUrn(entityUrn.toString()); + } + return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java new file mode 100644 index 00000000000000..c2aae4bd27d543 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java @@ -0,0 +1,76 @@ +package com.linkedin.datahub.graphql.types.incident; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.generated.IncidentSource; +import com.linkedin.datahub.graphql.generated.IncidentSourceType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.datahub.graphql.generated.IncidentStatus; +import com.linkedin.datahub.graphql.generated.IncidentType; +import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.metadata.Constants; +import javax.annotation.Nullable; + +/** Maps a GMS {@link EntityResponse} to a GraphQL incident. 
*/ +public class IncidentMapper { + + public static Incident map(@Nullable QueryContext context, final EntityResponse entityResponse) { + final Incident result = new Incident(); + final Urn entityUrn = entityResponse.getUrn(); + final EnvelopedAspectMap aspects = entityResponse.getAspects(); + result.setType(EntityType.INCIDENT); + result.setUrn(entityUrn.toString()); + + final EnvelopedAspect envelopedIncidentInfo = aspects.get(Constants.INCIDENT_INFO_ASPECT_NAME); + if (envelopedIncidentInfo != null) { + final IncidentInfo info = new IncidentInfo(envelopedIncidentInfo.getValue().data()); + // Assumption alert! This assumes the incident type in GMS exactly equals that in GraphQL + result.setIncidentType(IncidentType.valueOf(info.getType().name())); + result.setCustomType(info.getCustomType(GetMode.NULL)); + result.setTitle(info.getTitle(GetMode.NULL)); + result.setDescription(info.getDescription(GetMode.NULL)); + result.setPriority(info.getPriority(GetMode.NULL)); + // TODO: Support multiple entities per incident. + result.setEntity(UrnToEntityMapper.map(context, info.getEntities().get(0))); + if (info.hasSource()) { + result.setSource(mapIncidentSource(context, info.getSource())); + } + if (info.hasStatus()) { + result.setStatus(mapStatus(context, info.getStatus())); + } + result.setCreated(AuditStampMapper.map(context, info.getCreated())); + } else { + throw new RuntimeException(String.format("Incident does not exist!. 
urn: %s", entityUrn)); + } + return result; + } + + private static IncidentStatus mapStatus( + @Nullable QueryContext context, final com.linkedin.incident.IncidentStatus incidentStatus) { + final IncidentStatus result = new IncidentStatus(); + result.setState(IncidentState.valueOf(incidentStatus.getState().name())); + result.setMessage(incidentStatus.getMessage(GetMode.NULL)); + result.setLastUpdated(AuditStampMapper.map(context, incidentStatus.getLastUpdated())); + return result; + } + + private static IncidentSource mapIncidentSource( + @Nullable QueryContext context, final com.linkedin.incident.IncidentSource incidentSource) { + final IncidentSource result = new IncidentSource(); + result.setType(IncidentSourceType.valueOf(incidentSource.getType().name())); + if (incidentSource.hasSourceUrn()) { + result.setSource(UrnToEntityMapper.map(context, incidentSource.getSourceUrn())); + } + return result; + } + + private IncidentMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java new file mode 100644 index 00000000000000..0ef204f4073ad6 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java @@ -0,0 +1,86 @@ +package com.linkedin.datahub.graphql.types.incident; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import graphql.execution.DataFetcherResult; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashSet; +import 
java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class IncidentType + implements com.linkedin.datahub.graphql.types.EntityType { + + static final Set ASPECTS_TO_FETCH = ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + public IncidentType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.INCIDENT; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Incident.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List incidentUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : incidentUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(IncidentMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Incidents", e); + } + } + + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java new file mode 100644 index 00000000000000..2c5e84dad28c21 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.types.ingest.secret.mapper; + +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.AuditStamp; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.SetMode; +import com.linkedin.entity.EntityResponse; +import com.linkedin.secret.DataHubSecretValue; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** + * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. + * + *

To be replaced by auto-generated mappers implementations + */ +public class DataHubSecretValueMapper { + + public static final DataHubSecretValueMapper INSTANCE = new DataHubSecretValueMapper(); + + public static DataHubSecretValue map( + EntityResponse fromSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + return INSTANCE.apply(fromSecret, name, value, description, auditStamp); + } + + public DataHubSecretValue apply( + EntityResponse existingSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + final DataHubSecretValue result; + if (Objects.nonNull(existingSecret)) { + result = + new DataHubSecretValue( + existingSecret.getAspects().get(SECRET_VALUE_ASPECT_NAME).getValue().data()); + } else { + result = new DataHubSecretValue(); + } + + result.setName(name); + result.setValue(value); + result.setDescription(description, SetMode.IGNORE_NULL); + if (Objects.nonNull(auditStamp)) { + result.setCreated(auditStamp); + } + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index 621fcf5f041403..2b576230c99a23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ -1,28 +1,32 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; public class AutoCompleteResultsMapper implements ModelMapper { public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); + public static AutoCompleteResults map( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(context, results); } @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { + public AutoCompleteResults apply( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult input) { final AutoCompleteResults result = new AutoCompleteResults(); result.setQuery(input.getQuery()); result.setSuggestions(input.getSuggestions()); result.setEntities( input.getEntities().stream() - .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .map(entity -> UrnToEntityMapper.map(context, entity.getUrn())) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index 689ff82147e15f..2d5deec7edb513 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -1,22 +1,24 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.Constants; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathMapper implements ModelMapper { public static final BrowsePathMapper INSTANCE = new 
BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); + public static BrowsePath map(@Nullable final QueryContext context, @Nonnull final String input) { + return INSTANCE.apply(context, input); } @Override - public BrowsePath apply(@Nonnull final String input) { + public BrowsePath apply(@Nullable final QueryContext context, @Nonnull final String input) { final BrowsePath browsePath = new BrowsePath(); final List path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index ae70823d675d8a..bb70e1ae4b77d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathsMapper implements ModelMapper, List> { public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List map(@Nonnull final List input) { - return INSTANCE.apply(input); + public static List map( + @Nullable final QueryContext context, @Nonnull final List input) { + return INSTANCE.apply(context, input); } @Override - public List apply(@Nonnull final List input) { + public List apply( + @Nullable final QueryContext context, @Nonnull final List input) { List results = new ArrayList<>(); for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); + results.add(BrowsePathMapper.map(context, pathStr)); } 
return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index 5cac03b19a74c1..3c2661a80b873e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroup; import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResults; @@ -8,11 +9,13 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowseResultMapper { private BrowseResultMapper() {} - public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { + public static BrowseResults map( + @Nullable final QueryContext context, com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumElements()) { @@ -24,13 +27,14 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setTotal(input.getNumElements()); final BrowseResultMetadata browseResultMetadata = new BrowseResultMetadata(); - browseResultMetadata.setPath(BrowsePathMapper.map(input.getMetadata().getPath()).getPath()); + browseResultMetadata.setPath( + BrowsePathMapper.map(context, input.getMetadata().getPath()).getPath()); browseResultMetadata.setTotalNumEntities(input.getMetadata().getTotalNumEntities()); result.setMetadata(browseResultMetadata); List entities = input.getEntities().stream() - .map(entity -> 
UrnToEntityMapper.map(entity.getUrn())) + .map(entity -> UrnToEntityMapper.map(context, entity.getUrn())) .collect(Collectors.toList()); result.setEntities(entities); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index c58341f994d4f3..984ef0fdcf2543 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nullable; + /** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper { - O apply(final I input, final A actor); + O apply(@Nullable final QueryContext context, final I input, final A actor); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 7c7dab2e024720..3cae0155a86db5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -4,13 +4,15 @@ import static com.linkedin.metadata.utils.SearchUtil.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregationMetadata; import com.linkedin.datahub.graphql.generated.FacetMetadata; import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SearchSuggestion; -import 
com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; import java.net.URISyntaxException; @@ -18,6 +20,7 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -25,14 +28,16 @@ public class MapperUtils { private MapperUtils() {} - public static SearchResult mapResult(SearchEntity searchEntity) { + public static SearchResult mapResult( + @Nullable final QueryContext context, SearchEntity searchEntity) { return new SearchResult( - UrnToEntityMapper.map(searchEntity.getEntity()), + UrnToEntityMapper.map(context, searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), - getMatchedFieldEntry(searchEntity.getMatchedFields())); + getMatchedFieldEntry(context, searchEntity.getMatchedFields())); } public static FacetMetadata mapFacet( + @Nullable final QueryContext context, com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); List aggregationFacets = @@ -54,7 +59,7 @@ public static FacetMetadata mapFacet( filterValue.getFacetCount(), filterValue.getEntity() == null ? 
null - : UrnToEntityMapper.map(filterValue.getEntity()))) + : UrnToEntityMapper.map(context, filterValue.getEntity()))) .collect(Collectors.toList())); return facetMetadata; } @@ -71,6 +76,7 @@ public static String convertFilterValue(String filterValue, List isEnti } public static List getMatchedFieldEntry( + @Nullable final QueryContext context, List highlightMetadata) { return highlightMetadata.stream() .map( @@ -81,8 +87,10 @@ public static List getMatchedFieldEntry( if (SearchUtils.isUrn(field.getValue())) { try { Urn urn = Urn.createFromString(field.getValue()); - matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { + ValidationUtils.validateUrn( + context.getOperationContext().getEntityRegistry(), urn); + matchedField.setEntity(UrnToEntityMapper.map(context, urn)); + } catch (IllegalArgumentException | URISyntaxException e) { log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 2167be9f27ca8f..8df26365c45aad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + /** Simple interface for classes capable of mapping an input of type I to an output of type O. 
*/ public interface ModelMapper { - O apply(final I input); + O apply(@Nullable final QueryContext context, @Nonnull final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index baf632ae8bdf4c..88214ac999a7b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; @@ -14,14 +15,16 @@ import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnScrollAcrossLineageResultsMapper { public static ScrollAcrossLineageResults map( - LineageScrollResult searchResult) { - return new UrnScrollAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageScrollResult searchResult) { + return new UrnScrollAcrossLineageResultsMapper().apply(context, searchResult); } - public ScrollAcrossLineageResults apply(LineageScrollResult input) { + public ScrollAcrossLineageResults apply( + @Nullable final QueryContext context, LineageScrollResult input) { final ScrollAcrossLineageResults result = new ScrollAcrossLineageResults(); result.setNextScrollId(input.getScrollId()); @@ -30,28 +33,33 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { final 
SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + input.getEntities().stream().map(r -> mapResult(context, r)).collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> mapFacet(context, f)) .collect(Collectors.toList())); return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - .setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + .map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) .build(); } - private EntityPath mapPath(UrnArray path) { + private EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { EntityPath entityPath = new EntityPath(); - entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index 72eb71cd095bb2..10d17bf1756e7f 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ScrollResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnScrollResultsMapper { public static ScrollResults map( + @Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult scrollResult) { - return new UrnScrollResultsMapper().apply(scrollResult); + return new UrnScrollResultsMapper().apply(context, scrollResult); } - public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { + public ScrollResults apply( + @Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult input) { final ScrollResults result = new ScrollResults(); if (!input.hasScrollId() && (!input.hasPageSize() || !input.hasNumEntities())) { @@ -25,10 +29,12 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index 642fe90cf2aedf..b85303909c0801 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; @@ -15,15 +16,18 @@ import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import java.util.ArrayList; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnSearchAcrossLineageResultsMapper { public static SearchAcrossLineageResults map( - LineageSearchResult searchResult) { - return new UrnSearchAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageSearchResult searchResult) { + return new UrnSearchAcrossLineageResultsMapper().apply(context, searchResult); } - public SearchAcrossLineageResults apply(LineageSearchResult input) { + public SearchAcrossLineageResults apply( + @Nullable final QueryContext context, LineageSearchResult input) { final SearchAcrossLineageResults result = new SearchAcrossLineageResults(); result.setStart(input.getFrom()); @@ -32,10 +36,10 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + input.getEntities().stream().map(r 
-> mapResult(context, r)).collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); if (input.hasFreshness()) { @@ -55,19 +59,26 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - .setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + .map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) + .setDegrees(new ArrayList<>(searchEntity.getDegrees())) + .setExplored(Boolean.TRUE.equals(searchEntity.isExplored())) .build(); } - private EntityPath mapPath(UrnArray path) { + private EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { EntityPath entityPath = new EntityPath(); - entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index 
d814c44e469bcc..c7c50c8f40c15c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnSearchResultsMapper { public static SearchResults map( + @Nullable final QueryContext context, com.linkedin.metadata.search.SearchResult searchResult) { - return new UrnSearchResultsMapper().apply(searchResult); + return new UrnSearchResultsMapper().apply(context, searchResult); } - public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { + public SearchResults apply( + @Nullable final QueryContext context, com.linkedin.metadata.search.SearchResult input) { final SearchResults result = new SearchResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumEntities()) { @@ -25,10 +29,12 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); result.setSuggestions( searchResultMetadata.getSuggestions().stream() diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index da3ddd1115437f..b8781b12303503 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -92,7 +91,7 @@ public List> batchLoad( gmsMlFeatureTable == null ? null : DataFetcherResult.newResult() - .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) + .data(MLFeatureTableMapper.map(context, gmsMlFeatureTable)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -111,14 +110,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlFeatureTable", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -131,8 +129,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "mlFeatureTable", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), "mlFeatureTable", query, filters, limit); + return 
AutoCompleteResultsMapper.map(context, result); } @Override @@ -148,8 +146,13 @@ public BrowseResults browse( path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlFeatureTable", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -157,6 +160,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon throws Exception { final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index 6f94ea44cd476e..bbfa92ae454659 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -82,7 +81,7 @@ public List> batchLoad( gmsMlFeature == null ? 
null : DataFetcherResult.newResult() - .data(MLFeatureMapper.map(gmsMlFeature)) + .data(MLFeatureMapper.map(context, gmsMlFeature)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -101,14 +100,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlFeature", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -120,7 +118,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete( + context.getOperationContext(), "mlFeature", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index d505b70effdd4c..24179ffd96426c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import 
graphql.execution.DataFetcherResult; @@ -92,7 +91,7 @@ public List> batchLoad( gmsMlModelGroup == null ? null : DataFetcherResult.newResult() - .data(MLModelGroupMapper.map(gmsMlModelGroup)) + .data(MLModelGroupMapper.map(context, gmsMlModelGroup)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -111,14 +110,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlModelGroup", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -131,8 +129,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "mlModelGroup", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), "mlModelGroup", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -148,8 +146,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModelGroup", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -158,6 +161,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon final StringArray result = _entityClient.getBrowsePaths( MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index 27b791d78e78ea..c3d29c91b0598f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -87,7 +86,7 @@ public List> batchLoad( gmsMlModel == null ? 
null : DataFetcherResult.newResult() - .data(MLModelMapper.map(gmsMlModel)) + .data(MLModelMapper.map(context, gmsMlModel)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -106,14 +105,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlModel", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -125,8 +123,8 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete(context.getOperationContext(), "mlModel", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -142,8 +140,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - "mlModel", pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModel", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -151,6 +154,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon throws Exception { final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index 10cfe181dd292f..4e1ef996ecc0d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -60,6 +59,7 @@ public Class objectClass() { @Override public List> batchLoad( final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlPrimaryKeyUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); @@ -82,7 +82,7 @@ public List> batchLoad( gmsMlPrimaryKey == null ? 
null : DataFetcherResult.newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) + .data(MLPrimaryKeyMapper.map(context, gmsMlPrimaryKey)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -101,14 +101,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "mlPrimaryKey", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -121,7 +120,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - "mlPrimaryKey", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), "mlPrimaryKey", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index 7db1216e1390d2..6485313b030cba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class BaseDataMapper implements ModelMapper { public static 
final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); + public static BaseData map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(context, input); } @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { + public BaseData apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { final BaseData result = new BaseData(); result.setDataset(input.getDataset().toString()); result.setMotivation(input.getMotivation()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java index 108717f325f681..b3b642ec9f1261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CaveatsAndRecommendationsMapper @@ -12,16 +14,18 @@ public class CaveatsAndRecommendationsMapper new CaveatsAndRecommendationsMapper(); public static CaveatsAndRecommendations map( + @Nullable QueryContext context, @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - return INSTANCE.apply(caveatsAndRecommendations); + return INSTANCE.apply(context, caveatsAndRecommendations); } 
@Override public CaveatsAndRecommendations apply( + @Nullable QueryContext context, com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); if (caveatsAndRecommendations.getCaveats() != null) { - result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); + result.setCaveats(CaveatsDetailsMapper.map(context, caveatsAndRecommendations.getCaveats())); } if (caveatsAndRecommendations.getRecommendations() != null) { result.setRecommendations(caveatsAndRecommendations.getRecommendations()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java index 2226197e673f50..9b89e955205461 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatDetails; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CaveatsDetailsMapper @@ -9,12 +11,14 @@ public class CaveatsDetailsMapper public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); - public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - return INSTANCE.apply(input); + public static CaveatDetails map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + return INSTANCE.apply(context, input); } @Override - public CaveatDetails apply(@NonNull final 
com.linkedin.ml.metadata.CaveatDetails input) { + public CaveatDetails apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { final CaveatDetails result = new CaveatDetails(); result.setCaveatDescription(input.getCaveatDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java index 8959e59265e14e..4d0983177fb746 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EthicalConsiderations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class EthicalConsiderationsMapper @@ -10,12 +12,14 @@ public class EthicalConsiderationsMapper public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); public static EthicalConsiderations map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - return INSTANCE.apply(ethicalConsiderations); + return INSTANCE.apply(context, ethicalConsiderations); } @Override public EthicalConsiderations apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { final EthicalConsiderations result = new EthicalConsiderations(); result.setData(ethicalConsiderations.getData()); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java index 212db94081371a..442ce052c1c8f1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.HyperParameterMap; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.ml.metadata.HyperParameterValueTypeMap; +import javax.annotation.Nullable; import lombok.NonNull; public class HyperParameterMapMapper @@ -10,17 +12,19 @@ public class HyperParameterMapMapper public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); - public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { - return INSTANCE.apply(input); + public static HyperParameterMap map( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { + return INSTANCE.apply(context, input); } @Override - public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { + public HyperParameterMap apply( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { final HyperParameterMap result = new HyperParameterMap(); for (String key : input.keySet()) { result.setKey(key); - result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); + result.setValue(HyperParameterValueTypeMapper.map(context, input.get(key))); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index f60f34dd7a085f..8b5bc445a36092 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BooleanBox; import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.HyperParameterValueType; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class HyperParameterValueTypeMapper @@ -15,12 +17,14 @@ public class HyperParameterValueTypeMapper public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); public static HyperParameterValueType map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - return INSTANCE.apply(input); + return INSTANCE.apply(context, input); } @Override public HyperParameterValueType apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { HyperParameterValueType result = null; @@ -33,7 +37,7 @@ public HyperParameterValueType apply( } else if (input.isDouble()) { result = new FloatBox(input.getDouble()); } else if (input.isFloat()) { - result = new FloatBox(new Double(input.getFloat())); + result = new FloatBox(Double.valueOf(input.getFloat())); } else { throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java index 9f724ae71a55e9..6a7d1aae7679b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.IntendedUse; import com.linkedin.datahub.graphql.generated.IntendedUserType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class IntendedUseMapper @@ -11,12 +13,16 @@ public class IntendedUseMapper public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); - public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - return INSTANCE.apply(intendedUse); + public static IntendedUse map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + return INSTANCE.apply(context, intendedUse); } @Override - public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + public IntendedUse apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { final IntendedUse result = new IntendedUse(); result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); result.setPrimaryUses(intendedUse.getPrimaryUses()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index 58e59edfa2e389..a4f3aa7a0e2261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -13,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; @@ -26,8 +30,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import 
com.linkedin.entity.EntityResponse; @@ -35,19 +41,23 @@ import com.linkedin.metadata.key.MLFeatureKey; import com.linkedin.ml.metadata.EditableMLFeatureProperties; import com.linkedin.ml.metadata.MLFeatureProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureMapper implements ModelMapper { public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLFeature map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { + public MLFeature apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLFeature result = new MLFeature(); Urn entityUrn = entityResponse.getUrn(); @@ -58,79 +68,101 @@ public MLFeature apply(@Nonnull final EntityResponse entityResponse) { result.setLastIngested(lastIngested); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); + mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, MLFeatureMapper::mapMLFeatureKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); + mlFeature.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, ML_FEATURE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapMLFeatureProperties); mappingHelper.mapToResult( 
INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> mlFeature.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlFeature, dataMap) -> + mlFeature.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlFeature.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureMapper::mapDomains); mappingHelper.mapToResult( - ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + 
entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlFeature, dataMap) -> + mlFeature.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeature.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + private static void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); mlFeature.setName(mlFeatureKey.getName()); mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); } - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + private static void mapMLFeatureProperties( + @Nullable final QueryContext context, + @Nonnull MLFeature mlFeature, + @Nonnull DataMap dataMap) { MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); mlFeature.setDescription(featureProperties.getDescription()); if (featureProperties.getDataType() != null) { mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } } - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn 
entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeature entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLFeature entity, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { + private static void mapEditableProperties(MLFeature entity, DataMap dataMap) { EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 7bcefbc305192b..92d090275867da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import 
com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLFeaturePropertiesMapper @@ -13,12 +15,14 @@ public class MLFeaturePropertiesMapper public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); public static MLFeatureProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); + return INSTANCE.apply(context, mlFeatureProperties); } @Override public MLFeatureProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { final MLFeatureProperties result = new MLFeatureProperties(); @@ -27,7 +31,7 @@ public MLFeatureProperties apply( result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); } if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlFeatureProperties.getVersion())); } if (mlFeatureProperties.getSources() != null) { result.setSources( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index d074e14f95c82c..30bf4dda1cf4fd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -13,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -26,8 +30,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -35,19 +41,23 @@ import com.linkedin.metadata.key.MLFeatureTableKey; import com.linkedin.ml.metadata.EditableMLFeatureTableProperties; import com.linkedin.ml.metadata.MLFeatureTableProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLFeatureTableMapper implements ModelMapper { public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLFeatureTable map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { + public MLFeatureTable apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLFeatureTable result = new MLFeatureTable(); Urn entityUrn = entityResponse.getUrn(); @@ -61,47 +71,63 @@ public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mlFeatureTable.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); mappingHelper.mapToResult( ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, - (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); + (entity, dataMap) -> this.mapMLFeatureTableProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> mlFeatureTable.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); + mlFeatureTable.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( 
DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlFeatureTable.setDeprecation( + DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureTableMapper::mapDomains); mappingHelper.mapToResult( ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlFeatureTable, dataMap) -> + mlFeatureTable.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return 
AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeatureTable.class); + } else { + return mappingHelper.getResult(); + } } private void mapMLFeatureTableKey( @@ -113,27 +139,34 @@ private void mapMLFeatureTableKey( mlFeatureTable.setPlatform(partialPlatform); } - private void mapMLFeatureTableProperties( - @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { + private static void mapMLFeatureTableProperties( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable mlFeatureTable, + @Nonnull DataMap dataMap, + Urn entityUrn) { MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); mlFeatureTable.setFeatureTableProperties( - MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); mlFeatureTable.setProperties( - MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); mlFeatureTable.setDescription(featureTableProperties.getDescription()); } - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable entity, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index fff504d43c81a1..d9fed13ed0d0be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -1,11 +1,15 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLFeatureTablePropertiesMapper { @@ -14,12 +18,14 @@ public class MLFeatureTablePropertiesMapper { new MLFeatureTablePropertiesMapper(); public static MLFeatureTableProperties map( + @Nullable final QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + return INSTANCE.apply(context, mlFeatureTableProperties, entityUrn); } - public MLFeatureTableProperties apply( + public static MLFeatureTableProperties apply( + @Nullable final 
QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { final MLFeatureTableProperties result = new MLFeatureTableProperties(); @@ -28,6 +34,7 @@ public MLFeatureTableProperties apply( if (mlFeatureTableProperties.getMlFeatures() != null) { result.setMlFeatures( mlFeatureTableProperties.getMlFeatures().stream() + .filter(f -> context == null || canView(context.getOperationContext(), f)) .map( urn -> { final MLFeature mlFeature = new MLFeature(); @@ -40,6 +47,7 @@ public MLFeatureTableProperties apply( if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { result.setMlPrimaryKeys( mlFeatureTableProperties.getMlPrimaryKeys().stream() + .filter(k -> context == null || canView(context.getOperationContext(), k)) .map( urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index bb3c85e411e715..37989b3bda8273 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLHyperParamMapper @@ -9,12 +11,14 @@ public class MLHyperParamMapper public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); + public static MLHyperParam 
map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(context, input); } @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + public MLHyperParam apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { final MLHyperParam result = new MLHyperParam(); result.setDescription(input.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 765a44d218567d..80ebabec283bb3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLMetric; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLMetricMapper implements ModelMapper { public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return INSTANCE.apply(metric); + public static MLMetric map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(context, metric); } @Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + public MLMetric apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { final MLMetric result = new MLMetric(); result.setDescription(metric.getDescription()); 
result.setValue(metric.getValue()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index e86072ce3848e8..4316251a464f28 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelFactorPromptsMapper @@ -11,24 +13,26 @@ public class MLModelFactorPromptsMapper public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); public static MLModelFactorPrompts map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); + return INSTANCE.apply(context, input); } @Override public MLModelFactorPrompts apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); if (input.getEvaluationFactors() != null) { mlModelFactorPrompts.setEvaluationFactors( input.getEvaluationFactors().stream() - .map(MLModelFactorsMapper::map) + .map(f -> MLModelFactorsMapper.map(context, f)) .collect(Collectors.toList())); } if (input.getRelevantFactors() != null) { mlModelFactorPrompts.setRelevantFactors( input.getRelevantFactors().stream() - .map(MLModelFactorsMapper::map) + .map(f -> 
MLModelFactorsMapper.map(context, f)) .collect(Collectors.toList())); } return mlModelFactorPrompts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index 3b212eca52801a..5607ef8c2cf130 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelFactorsMapper @@ -11,12 +13,14 @@ public class MLModelFactorsMapper public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); public static MLModelFactors map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); + return INSTANCE.apply(context, modelFactors); } @Override public MLModelFactors apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { final MLModelFactors result = new MLModelFactors(); if (mlModelFactors.getEnvironment() != null) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index cc9baaa33a660a..7e99040e44c82e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.Ownership; @@ -12,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; @@ -25,8 +29,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -34,19 +40,23 @@ import com.linkedin.metadata.key.MLModelGroupKey; import com.linkedin.ml.metadata.EditableMLModelGroupProperties; import com.linkedin.ml.metadata.MLModelGroupProperties; +import com.linkedin.structured.StructuredProperties; import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelGroupMapper implements ModelMapper { public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLModelGroup map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { + public MLModelGroup apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLModelGroup result = new MLModelGroup(); Urn entityUrn = entityResponse.getUrn(); @@ -60,43 +70,62 @@ public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); + mlModelGroup.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( - ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); + ML_MODEL_GROUP_KEY_ASPECT_NAME, MLModelGroupMapper::mapToMLModelGroupKey); + mappingHelper.mapToResult( + context, + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, + MLModelGroupMapper::mapToMLModelGroupProperties); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlModelGroup, dataMap) -> + mlModelGroup.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> - 
mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlModelGroup.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> MLModelGroupMapper.mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelGroupMapper::mapDomains); mappingHelper.mapToResult( - ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelGroupMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mlModelGroup.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlModelGroup, dataMap) -> + mlModelGroup.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && 
!canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModelGroup.class); + } else { + return mappingHelper.getResult(); + } } - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + private static void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); mlModelGroup.setName(mlModelGroupKey.getName()); mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); @@ -105,28 +134,33 @@ private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { mlModelGroup.setPlatform(partialPlatform); } - private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { + private static void mapToMLModelGroupProperties( + @Nullable final QueryContext context, MLModelGroup mlModelGroup, DataMap dataMap) { MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(context, modelGroupProperties)); if (modelGroupProperties.getDescription() != null) { mlModelGroup.setDescription(modelGroupProperties.getDescription()); } } - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModelGroup entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLModelGroup entity, + 
@Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + private static void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index bae60a026b49a3..9f1918f9ec4893 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelGroupPropertiesMapper @@ -11,18 +13,20 @@ public class MLModelGroupPropertiesMapper public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); public static MLModelGroupProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); + return INSTANCE.apply(context, mlModelGroupProperties); } 
@Override public MLModelGroupProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { final MLModelGroupProperties result = new MLModelGroupProperties(); result.setDescription(mlModelGroupProperties.getDescription()); if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlModelGroupProperties.getVersion())); } result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 827b35c282237a..a3bc5c663c89ae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -14,6 +16,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; 
import com.linkedin.datahub.graphql.generated.FabricType; @@ -29,8 +33,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -47,20 +53,24 @@ import com.linkedin.ml.metadata.QuantitativeAnalyses; import com.linkedin.ml.metadata.SourceCode; import com.linkedin.ml.metadata.TrainingData; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLModelMapper implements ModelMapper { public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLModel map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { + public MLModel apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLModel result = new MLModel(); Urn entityUrn = entityResponse.getUrn(); @@ -71,36 +81,36 @@ public MLModel apply(@Nonnull final EntityResponse entityResponse) { result.setLastIngested(lastIngested); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, MLModelMapper::mapMLModelKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mlModel.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( ML_MODEL_PROPERTIES_ASPECT_NAME, - (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapMLModelProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + (mlModel, dataMap) -> mapGlobalTags(context, mlModel, dataMap, entityUrn)); mappingHelper.mapToResult( INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); + mlModel.setIntendedUse(IntendedUseMapper.map(context, new IntendedUse(dataMap)))); mappingHelper.mapToResult( 
ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setFactorPrompts( - MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + MLModelFactorPromptsMapper.map(context, new MLModelFactorPrompts(dataMap)))); mappingHelper.mapToResult( METRICS_ASPECT_NAME, - (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(context, new Metrics(dataMap)))); mappingHelper.mapToResult( EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setEvaluationData( new EvaluationData(dataMap) .getEvaluationData().stream() - .map(BaseDataMapper::map) + .map(d -> BaseDataMapper.map(context, d)) .collect(Collectors.toList()))); mappingHelper.mapToResult( TRAINING_DATA_ASPECT_NAME, @@ -108,60 +118,76 @@ public MLModel apply(@Nonnull final EntityResponse entityResponse) { mlModel.setTrainingData( new TrainingData(dataMap) .getTrainingData().stream() - .map(BaseDataMapper::map) + .map(d -> BaseDataMapper.map(context, d)) .collect(Collectors.toList()))); mappingHelper.mapToResult( QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setQuantitativeAnalyses( - QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); + QuantitativeAnalysesMapper.map(context, new QuantitativeAnalyses(dataMap)))); mappingHelper.mapToResult( ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setEthicalConsiderations( - EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + EthicalConsiderationsMapper.map(context, new EthicalConsiderations(dataMap)))); mappingHelper.mapToResult( CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCaveatsAndRecommendations( - CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + CaveatsAndRecommendationsMapper.map( + context, new CaveatsAndRecommendations(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> 
mlModel.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, SOURCE_CODE_ASPECT_NAME, MLModelMapper::mapSourceCode); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( - COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + COST_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(context, new Cost(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlModel.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelMapper::mapDomains); mappingHelper.mapToResult( - ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> 
- mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModel.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + private static void mapMLModelKey(MLModel mlModel, DataMap dataMap) { MLModelKey mlModelKey = new MLModelKey(dataMap); mlModel.setName(mlModelKey.getName()); mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); @@ -170,40 +196,44 @@ private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { mlModel.setPlatform(partialPlatform); } - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + private static void mapMLModelProperties( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + mlModel.setProperties(MLModelPropertiesMapper.map(context, modelProperties, entityUrn)); if (modelProperties.getDescription() != null) { mlModel.setDescription(modelProperties.getDescription()); } } - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { 
GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); mlModel.setGlobalTags(graphQlGlobalTags); mlModel.setTags(graphQlGlobalTags); } - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + private static void mapSourceCode( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap) { SourceCode sourceCode = new SourceCode(dataMap); com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = new com.linkedin.datahub.graphql.generated.SourceCode(); graphQlSourceCode.setSourceCode( sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map) + .map(c -> SourceCodeUrlMapper.map(context, c)) .collect(Collectors.toList())); mlModel.setSourceCode(graphQlSourceCode); } - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLModel entity, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLModel entity, DataMap dataMap) { + private static void mapEditableProperties(MLModel entity, DataMap dataMap) { EditableMLModelProperties input = new EditableMLModelProperties(dataMap); MLModelEditableProperties editableProperties = new MLModelEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index f2781f5bca5c88..a89904b3ab915c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,10 +1,14 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroup; import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelPropertiesMapper { @@ -12,12 +16,16 @@ public class MLModelPropertiesMapper { public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); public static MLModelProperties map( - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); + @Nullable final QueryContext context, + 
@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { + return INSTANCE.apply(context, mlModelProperties, entityUrn); } public MLModelProperties apply( - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { final MLModelProperties result = new MLModelProperties(); result.setDate(mlModelProperties.getDate()); @@ -32,7 +40,7 @@ public MLModelProperties apply( if (mlModelProperties.getHyperParams() != null) { result.setHyperParams( mlModelProperties.getHyperParams().stream() - .map(param -> MLHyperParamMapper.map(param)) + .map(param -> MLHyperParamMapper.map(context, param)) .collect(Collectors.toList())); } @@ -42,13 +50,14 @@ public MLModelProperties apply( if (mlModelProperties.getTrainingMetrics() != null) { result.setTrainingMetrics( mlModelProperties.getTrainingMetrics().stream() - .map(metric -> MLMetricMapper.map(metric)) + .map(metric -> MLMetricMapper.map(context, metric)) .collect(Collectors.toList())); } if (mlModelProperties.getGroups() != null) { result.setGroups( mlModelProperties.getGroups().stream() + .filter(g -> context == null || canView(context.getOperationContext(), g)) .map( group -> { final MLModelGroup subgroup = new MLModelGroup(); @@ -61,6 +70,7 @@ public MLModelProperties apply( if (mlModelProperties.getMlFeatures() != null) { result.setMlFeatures( mlModelProperties.getMlFeatures().stream() + .filter(f -> context == null || canView(context.getOperationContext(), f)) .map(Urn::toString) .collect(Collectors.toList())); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index a8efd748401f02..36784f96ea30ea 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -12,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; @@ -24,8 +28,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -33,19 +39,23 @@ import com.linkedin.metadata.key.MLPrimaryKeyKey; import com.linkedin.ml.metadata.EditableMLPrimaryKeyProperties; import 
com.linkedin.ml.metadata.MLPrimaryKeyProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLPrimaryKeyMapper implements ModelMapper { public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLPrimaryKey map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + public MLPrimaryKey apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLPrimaryKey result = new MLPrimaryKey(); Urn entityUrn = entityResponse.getUrn(); @@ -59,52 +69,73 @@ public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mlPrimaryKey.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( - ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + ML_PRIMARY_KEY_KEY_ASPECT_NAME, MLPrimaryKeyMapper::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + context, + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, + MLPrimaryKeyMapper::mapMLPrimaryKeyProperties); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> mlPrimaryKey.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new 
InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlPrimaryKey.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLPrimaryKeyMapper::mapDomains); mappingHelper.mapToResult( - ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, MLPrimaryKeyMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), 
entityUrn.toString())))); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLPrimaryKey.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + private static void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + private static void mapMLPrimaryKeyProperties( + @Nullable final QueryContext context, MLPrimaryKey mlPrimaryKey, DataMap dataMap) { MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setPrimaryKeyProperties( + MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); if (primaryKeyProperties.getDataType() != null) { mlPrimaryKey.setDataType( @@ -112,20 +143,24 @@ private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMa } } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); 
entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLPrimaryKey entity, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + private static void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 16d6120cd9dff2..09e41fe7ee4e8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLPrimaryKeyPropertiesMapper @@ -14,12 +16,14 @@ public class 
MLPrimaryKeyPropertiesMapper public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); public static MLPrimaryKeyProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); + return INSTANCE.apply(context, mlPrimaryKeyProperties); } @Override public MLPrimaryKeyProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); @@ -29,7 +33,7 @@ public MLPrimaryKeyProperties apply( MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); } if (mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlPrimaryKeyProperties.getVersion())); } result.setSources( mlPrimaryKeyProperties.getSources().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 76fa8c84e95710..ce6357655dfbf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MetricsMapper implements ModelMapper { public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics 
map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); + public static Metrics map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(context, metrics); } @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + public Metrics apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { final Metrics result = new Metrics(); result.setDecisionThreshold(metrics.getDecisionThreshold()); result.setPerformanceMeasures(metrics.getPerformanceMeasures()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index e46cb0a074bd7b..fbb259666c2731 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class QuantitativeAnalysesMapper @@ -10,17 +12,20 @@ public class QuantitativeAnalysesMapper public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); public static QuantitativeAnalyses map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); + return INSTANCE.apply(context, quantitativeAnalyses); } @Override public QuantitativeAnalyses apply( + 
@Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { final QuantitativeAnalyses result = new QuantitativeAnalyses(); result.setIntersectionalResults( - ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + ResultsTypeMapper.map(context, quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults( + ResultsTypeMapper.map(context, quantitativeAnalyses.getUnitaryResults())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 4b6529c59db3e8..e73f80511fbaa5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class ResultsTypeMapper @@ -10,12 +12,14 @@ public class ResultsTypeMapper public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); + public static ResultsType map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(context, input); } @Override - public ResultsType apply(@NonNull final 
com.linkedin.ml.metadata.ResultsType input) { + public ResultsType apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { final ResultsType result; if (input.isString()) { result = new StringBox(input.getString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index b6bd5efdc42175..1b0695e5993494 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,20 +1,24 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SourceCodeUrlMapper implements ModelMapper { public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); + public static SourceCodeUrl map( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(context, input); } @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + public SourceCodeUrl apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { final SourceCodeUrl results = new SourceCodeUrl(); results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); 
results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 5758a52538c1e5..4020ef6e35ece9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -1,20 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; import com.linkedin.common.VersionTag; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class VersionTagMapper implements ModelMapper { public static final VersionTagMapper INSTANCE = new VersionTagMapper(); public static com.linkedin.datahub.graphql.generated.VersionTag map( - @Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); + @Nullable QueryContext context, @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(context, versionTag); } @Override - public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + public com.linkedin.datahub.graphql.generated.VersionTag apply( + @Nullable QueryContext context, @Nonnull final VersionTag input) { final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); result.setVersionTag(input.getVersionTag()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index b6990c3816b53f..e40690d58eb1cf 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -38,7 +38,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -95,14 +94,13 @@ public SearchResults search( final Map facetFilters = Collections.emptyMap(); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), NOTEBOOK_ENTITY_NAME, query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -115,8 +113,8 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), NOTEBOOK_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -135,8 +133,13 @@ public BrowseResults browse( path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = _entityClient.browse( - NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + NOTEBOOK_ENTITY_NAME, + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override @@ -145,7 +148,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c final StringArray result = _entityClient.getBrowsePaths( NotebookUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -182,7 +185,7 @@ public List> batchLoad( entityResponse == null ? null : DataFetcherResult.newResult() - .data(NotebookMapper.map(entityResponse)) + .data(NotebookMapper.map(context, entityResponse)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -204,9 +207,9 @@ public Notebook update( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - Collection proposals = NotebookUpdateInputMapper.map(input, actor); + CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + Collection proposals = + NotebookUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -224,7 +227,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.NOTEBOOK_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index a263e31b26faf1..109006f9d4a90a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -12,6 +13,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import 
com.linkedin.datahub.graphql.generated.ChartCell; @@ -46,16 +49,17 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class NotebookMapper implements ModelMapper { public static final NotebookMapper INSTANCE = new NotebookMapper(); - public static Notebook map(EntityResponse response) { - return INSTANCE.apply(response); + public static Notebook map(@Nullable final QueryContext context, EntityResponse response) { + return INSTANCE.apply(context, response); } @Override - public Notebook apply(EntityResponse response) { + public Notebook apply(@Nullable final QueryContext context, EntityResponse response) { final Notebook convertedNotebook = new Notebook(); Urn entityUrn = response.getUrn(); @@ -63,45 +67,54 @@ public Notebook apply(EntityResponse response) { convertedNotebook.setType(EntityType.NOTEBOOK); EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); - mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); + mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, NotebookMapper::mapNotebookKey); mappingHelper.mapToResult( NOTEBOOK_INFO_ASPECT_NAME, - (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); + (entity, dataMap) -> mapNotebookInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); + context, NOTEBOOK_CONTENT_ASPECT_NAME, NotebookMapper::mapNotebookContent); + mappingHelper.mapToResult( + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, NotebookMapper::mapEditableNotebookProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> - notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + 
notebook.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + notebook.setTags(GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> notebook.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, NotebookMapper::mapDomains); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, NotebookMapper::mapSubTypes); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> notebook.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, DATA_PLATFORM_INSTANCE_ASPECT_NAME, NotebookMapper::mapDataPlatformInstance); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return 
AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Notebook.class); + } else { + return mappingHelper.getResult(); + } } - private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { + private static void mapDataPlatformInstance( + @Nullable final QueryContext context, Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); notebook.setPlatform( DataPlatform.builder() @@ -109,10 +122,10 @@ private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { .setUrn(dataPlatformInstance.getPlatform().toString()) .build()); notebook.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap))); } - private void mapSubTypes(Notebook notebook, DataMap dataMap) { + private static void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { com.linkedin.datahub.graphql.generated.SubTypes subTypes = @@ -122,20 +135,23 @@ private void mapSubTypes(Notebook notebook, DataMap dataMap) { } } - private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final NotebookKey notebookKey = new NotebookKey(dataMap); notebook.setNotebookId(notebookKey.getNotebookId()); notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo( - @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + private static void mapNotebookInfo( + @Nullable final QueryContext context, + @Nonnull Notebook notebook, + @Nonnull DataMap dataMap, + Urn entityUrn) { final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); 
notebookInfo.setTitle(gmsNotebookInfo.getTitle()); notebookInfo.setChangeAuditStamps( - ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -149,16 +165,17 @@ private void mapNotebookInfo( notebook.setInfo(notebookInfo); } - private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapNotebookContent( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); - notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); + notebookContent.setCells(mapNotebookCells(context, pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List mapNotebookCells( - com.linkedin.notebook.NotebookCellArray pegasusCells) { + private static List mapNotebookCells( + @Nullable final QueryContext context, com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() .map( pegasusCell -> { @@ -168,13 +185,13 @@ private List mapNotebookCells( notebookCell.setType(cellType); switch (cellType) { case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + notebookCell.setChartCell(mapChartCell(context, pegasusCell.getChartCell())); break; case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + notebookCell.setTextCell(mapTextCell(context, pegasusCell.getTextCell())); break; case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + notebookCell.setQueryChell(mapQueryCell(context, pegasusCell.getQueryCell())); break; default: throw new DataHubGraphQLException( @@ -186,39 
+203,43 @@ private List mapNotebookCells( .collect(Collectors.toList()); } - private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) { + private static ChartCell mapChartCell( + @Nullable final QueryContext context, com.linkedin.notebook.ChartCell pegasusChartCell) { ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); chartCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusChartCell.getChangeAuditStamps())); return chartCell; } - private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { + private static TextCell mapTextCell( + @Nullable final QueryContext context, com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); textCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } - private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) { + private static QueryCell mapQueryCell( + @Nullable final QueryContext context, com.linkedin.notebook.QueryCell pegasusQueryCell) { QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); queryCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { - queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); + 
queryCell.setLastExecuted(AuditStampMapper.map(context, pegasusQueryCell.getLastExecuted())); } return queryCell; } - private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapEditableNotebookProperties( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); @@ -226,9 +247,10 @@ private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull notebook.setEditableProperties(notebookEditableProperties); } - private void mapDomains(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- notebook.setDomain(DomainAssociationMapper.map(domains, notebook.getUrn())); + notebook.setDomain(DomainAssociationMapper.map(context, domains, notebook.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0d6c70e07053fc..62d1e488482ac3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,6 +19,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class NotebookUpdateInputMapper implements InputModelMapper, Urn> { @@ -25,12 +27,15 @@ public class NotebookUpdateInputMapper public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); public static Collection map( - @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(notebookUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final NotebookUpdateInput notebookUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, notebookUpdateInput, actor); } @Override - public Collection apply(NotebookUpdateInput input, Urn actor) { + public Collection apply( + @Nullable 
final QueryContext context, NotebookUpdateInput input, Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(NOTEBOOK_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -40,7 +45,8 @@ public Collection apply(NotebookUpdateInput input, Urn a if (input.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + OwnershipUpdateMapper.map(context, input.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { @@ -48,7 +54,7 @@ public Collection apply(NotebookUpdateInput input, Urn a globalTags.setTags( new TagAssociationArray( input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index f7ed4c59a805a5..6b78ba113225ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -69,7 +69,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(OwnershipTypeMapper.map(gmsResult)) + .data(OwnershipTypeMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 9eebe95df8d8cc..76d41897dafd66 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; @@ -15,17 +16,19 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OwnershipTypeMapper implements ModelMapper { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); - public static OwnershipTypeEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static OwnershipTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { + public OwnershipTypeEntity apply(@Nullable QueryContext context, @Nonnull EntityResponse input) { final OwnershipTypeEntity result = new OwnershipTypeEntity(); result.setUrn(input.getUrn().toString()); @@ -35,7 +38,7 @@ public OwnershipTypeEntity 
apply(@Nonnull EntityResponse input) { mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 318818b8a21408..e40ae84f2c131c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.DataHubPolicy; import com.linkedin.datahub.graphql.generated.EntityType; @@ -26,28 +27,32 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataHubPolicyMapper implements ModelMapper { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); - public static DataHubPolicy map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubPolicy map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubPolicy apply(@Nonnull final EntityResponse entityResponse) { + public DataHubPolicy apply( + @Nullable QueryContext context, @Nonnull final EntityResponse 
entityResponse) { final DataHubPolicy result = new DataHubPolicy(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATAHUB_POLICY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); + mappingHelper.mapToResult(context, DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); return mappingHelper.getResult(); } - private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { + private void mapDataHubPolicyInfo( + @Nullable QueryContext context, @Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { DataHubPolicyInfo policyInfo = new DataHubPolicyInfo(dataMap); policy.setDescription(policyInfo.getDescription()); // Careful - we assume no other Policy types or states have been ingested using a backdoor. @@ -58,7 +63,7 @@ private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMa policy.setActors(mapActors(policyInfo.getActors())); policy.setEditable(policyInfo.isEditable()); if (policyInfo.hasResources()) { - policy.setResources(mapResources(policyInfo.getResources())); + policy.setResources(mapResources(context, policyInfo.getResources())); } } @@ -88,7 +93,8 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { return result; } - private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) { + private ResourceFilter mapResources( + @Nullable QueryContext context, final DataHubResourceFilter resourceFilter) { final ResourceFilter result = new ResourceFilter(); result.setAllResources(resourceFilter.isAllResources()); if (resourceFilter.hasType()) { @@ -98,12 +104,13 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) result.setResources(resourceFilter.getResources()); } if (resourceFilter.hasFilter()) { - 
result.setFilter(mapFilter(resourceFilter.getFilter())); + result.setFilter(mapFilter(context, resourceFilter.getFilter())); } return result; } - private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { + private PolicyMatchFilter mapFilter( + @Nullable QueryContext context, final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() .setCriteria( filter.getCriteria().stream() @@ -113,7 +120,7 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .setField(criterion.getField()) .setValues( criterion.getValues().stream() - .map(this::mapValue) + .map(c -> mapValue(context, c)) .collect(Collectors.toList())) .setCondition( PolicyMatchCondition.valueOf(criterion.getCondition().name())) @@ -122,13 +129,13 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private PolicyMatchCriterionValue mapValue(@Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); return PolicyMatchCriterionValue.builder() .setValue(value) - .setEntity(UrnToEntityMapper.map(urn)) + .setEntity(UrnToEntityMapper.map(context, urn)) .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 3dea9046dcf36f..ac7cd2bc0a83c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -66,7 +66,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataHubPolicyMapper.map(gmsResult)) + .data(DataHubPolicyMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index f35111f78a6944..674011a4f2f288 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Media; @@ -17,17 +18,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class PostMapper implements ModelMapper { public static final PostMapper INSTANCE = new PostMapper(); - public static Post map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Post map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Post apply(@Nonnull final EntityResponse entityResponse) { + public Post apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Post result = new Post(); result.setUrn(entityResponse.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index 
2bdcda3592608d..e71b569e9ae238 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -2,10 +2,13 @@ import static com.linkedin.metadata.Constants.*; +import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.QueryEntity; @@ -13,6 +16,7 @@ import com.linkedin.datahub.graphql.generated.QuerySource; import com.linkedin.datahub.graphql.generated.QueryStatement; import com.linkedin.datahub.graphql.generated.QuerySubject; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -22,29 +26,46 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class QueryMapper implements ModelMapper { public static final QueryMapper INSTANCE = new QueryMapper(); - public static QueryEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static QueryEntity map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { + public QueryEntity apply( + @Nullable final 
QueryContext context, @Nonnull final EntityResponse entityResponse) { final QueryEntity result = new QueryEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.QUERY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); + mappingHelper.mapToResult(context, QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); mappingHelper.mapToResult(QUERY_SUBJECTS_ASPECT_NAME, this::mapQuerySubjects); + mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapPlatform); return mappingHelper.getResult(); } - private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { + private void mapPlatform(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { + DataPlatformInstance aspect = new DataPlatformInstance(dataMap); + if (aspect.hasPlatform()) { + final DataPlatform platform = new DataPlatform(); + platform.setUrn(aspect.getPlatform().toString()); + platform.setType(EntityType.DATA_PLATFORM); + query.setPlatform(platform); + } + } + + private void mapQueryProperties( + @Nullable final QueryContext context, @Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); @@ -57,6 +78,9 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); + if (queryProperties.hasOrigin() && queryProperties.getOrigin() != null) { + res.setOrigin(UrnToEntityMapper.map(context, queryProperties.getOrigin())); + } AuditStamp created = new AuditStamp(); 
created.setTime(queryProperties.getCreated().getTime()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index 0c1fd33e381104..602b3699d11e4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -21,7 +21,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class QueryType implements com.linkedin.datahub.graphql.types.EntityType { @@ -50,6 +52,7 @@ public List> batchLoad( final List viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + log.debug("Fetching query entities: {}", viewUrns); final Map entities = _entityClient.batchGetV2( QUERY_ENTITY_NAME, @@ -57,7 +60,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : viewUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -67,7 +70,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(QueryMapper.map(gmsResult)) + .data(QueryMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index db086e682d57c9..b20e78e149c3e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowDataJobsRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class DataFlowDataJobsRelationshipsMapper new DataFlowDataJobsRelationshipsMapper(); public static DataFlowDataJobsRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public DataFlowDataJobsRelationships apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships input) { final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) 
.collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 4df64c7ecb85ed..6a03a060c3687a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DownstreamEntityRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class DownstreamEntityRelationshipsMapper new DownstreamEntityRelationshipsMapper(); public static DownstreamEntityRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public DownstreamEntityRelationships apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships input) { final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) .collect(Collectors.toList())); return result; } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index e3743804b49080..7ab37031d824c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.EntityWithRelationships; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EntityRelationshipLegacyMapper implements ModelMapper { @@ -14,22 +16,24 @@ public class EntityRelationshipLegacyMapper new EntityRelationshipLegacyMapper(); public static EntityRelationshipLegacy map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); + return INSTANCE.apply(context, relationship); } @Override public EntityRelationshipLegacy apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationship relationship) { final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); EntityWithRelationships partialLineageEntity = - (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + (EntityWithRelationships) UrnToEntityMapper.map(context, relationship.getEntity()); if (partialLineageEntity 
!= null) { result.setEntity(partialLineageEntity); } if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); + result.setCreated(AuditStampMapper.map(context, relationship.getCreated())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 832e1bb396b3ba..35abc849e8f978 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UpstreamEntityRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class UpstreamEntityRelationshipsMapper new UpstreamEntityRelationshipsMapper(); public static UpstreamEntityRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public UpstreamEntityRelationships apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships input) { final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> 
EntityRelationshipLegacyMapper.map(context, r)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java new file mode 100644 index 00000000000000..cf40cc51d1e231 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java @@ -0,0 +1,32 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.entity.EntityResponse; +import io.datahubproject.metadata.services.RestrictedService; +import javax.annotation.Nonnull; + +public class RestrictedMapper { + + public static final RestrictedMapper INSTANCE = new RestrictedMapper(); + + public static Restricted map( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + return INSTANCE.apply(entityResponse, restrictedService); + } + + public Restricted apply( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + final Restricted result = new Restricted(); + Urn entityUrn = entityResponse.getUrn(); + String restrictedUrnString = restrictedService.encryptRestrictedUrn(entityUrn).toString(); + + result.setUrn(restrictedUrnString); + result.setType(EntityType.RESTRICTED); + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java new file mode 100644 index 00000000000000..a245397c8d83d4 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java @@ -0,0 +1,103 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.datahub.graphql.types.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import io.datahubproject.metadata.services.RestrictedService; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class RestrictedType implements EntityType { + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(); + + private final EntityClient _entityClient; + private final RestrictedService _restrictedService; + + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.RESTRICTED; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Restricted.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List restrictedUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List entityUrns = + restrictedUrns.stream() + .map(_restrictedService::decryptRestrictedUrn) + .collect(Collectors.toList()); + + // Create a map for entityType: entityUrns so we can fetch by entity type below + final Map> entityTypeToUrns = createEntityTypeToUrnsMap(entityUrns); + + try { + // Fetch from the DB for each 
entity type and add to one result map + final Map entities = new HashMap<>(); + entityTypeToUrns + .keySet() + .forEach( + entityType -> { + try { + entities.putAll( + _entityClient.batchGetV2( + entityType, + new HashSet<>(entityTypeToUrns.get(entityType)), + ASPECTS_TO_FETCH, + context.getAuthentication())); + } catch (Exception e) { + throw new RuntimeException("Failed to fetch restricted entities", e); + } + }); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(RestrictedMapper.map(gmsResult, _restrictedService)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } + + private Map> createEntityTypeToUrnsMap(final List urns) { + final Map> entityTypeToUrns = new HashMap<>(); + urns.forEach( + urn -> { + String entityType = urn.getEntityType(); + List existingUrns = + entityTypeToUrns.computeIfAbsent(entityType, k -> new ArrayList<>()); + existingUrns.add(urn); + }); + return entityTypeToUrns; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 95219457701955..530518d1cd14cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -67,7 +67,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataHubRoleMapper.map(gmsResult)) + .data(DataHubRoleMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 7a467886fc0844..7ba42b08cdc6af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -11,17 +12,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataHubRoleMapper implements ModelMapper { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); - public static DataHubRole map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubRole map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubRole apply(@Nonnull final EntityResponse entityResponse) { + public DataHubRole apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubRole result = new DataHubRole(); result.setUrn(entityResponse.getUrn().toString()); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index d51e0d06c0fdaa..46d494bc9219cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -18,7 +18,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; @@ -87,7 +86,9 @@ public List> batchLoad( gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(RoleMapper.map(gmsResult)).build()) + : DataFetcherResult.newResult() + .data(RoleMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Role", e); @@ -104,14 +105,13 @@ public SearchResults search( throws Exception { final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), Constants.ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -124,7 +124,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete( - Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + context.getOperationContext(), Constants.ROLE_ENTITY_NAME, query, filters, 
limit); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index 3eb090e4524395..2d6bd31c84fd90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; +import com.linkedin.common.RoleAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; @@ -19,8 +20,10 @@ public com.linkedin.datahub.graphql.generated.Access apply( @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); + RoleAssociationArray roles = + access.getRoles() != null ? 
access.getRoles() : new RoleAssociationArray(); result.setRoles( - access.getRoles().stream() + roles.stream() .map(association -> this.mapRoleAssociation(association, entityUrn)) .collect(Collectors.toList())); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index df18b7c89fafc0..80337cd9a53388 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Actor; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -18,13 +19,15 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class RoleMapper implements ModelMapper { public static final RoleMapper INSTANCE = new RoleMapper(); - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Role map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { @@ -59,7 +62,7 @@ private static List mapRoleUsers(RoleUserArray users) { } @Override - public Role apply(EntityResponse input) { + public Role apply(@Nullable QueryContext context, EntityResponse input) { final Role result = new Role(); final Urn entityUrn = input.getUrn(); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java new file mode 100644 index 00000000000000..3d1833e9c944ad --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -0,0 +1,58 @@ +package com.linkedin.datahub.graphql.types.schemafield; + +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class SchemaFieldMapper implements ModelMapper { + + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + + public static SchemaFieldEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public SchemaFieldEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + Urn entityUrn = entityResponse.getUrn(); + final SchemaFieldEntity result = this.mapSchemaFieldUrn(context, entityUrn); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + 
mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((schemaField, dataMap) -> + schemaField.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + + return result; + } + + private SchemaFieldEntity mapSchemaFieldUrn(@Nullable QueryContext context, Urn urn) { + try { + SchemaFieldEntity result = new SchemaFieldEntity(); + result.setUrn(urn.toString()); + result.setType(EntityType.SCHEMA_FIELD); + result.setFieldPath(urn.getEntityKey().get(1)); + Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); + result.setParent(UrnToEntityMapper.map(context, parentUrn)); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to load schemaField entity", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index b543a40cbac410..6017f368eea240 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -1,22 +1,40 @@ package com.linkedin.datahub.graphql.types.schemafield; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; -import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import 
com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +@RequiredArgsConstructor public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() {} + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME); + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; @Override public EntityType type() { @@ -40,29 +58,41 @@ public List> batchLoad( urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - return schemaFieldUrns.stream() - .map(this::mapSchemaFieldUrn) + Map entities = new HashMap<>(); + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + entities = + _entityClient.batchGetV2( + SCHEMA_FIELD_ENTITY_NAME, + new HashSet<>(schemaFieldUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + } + + final List gmsResults = new ArrayList<>(); + for (Urn urn : schemaFieldUrns) { + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + gmsResults.add(entities.getOrDefault(urn, null)); + } else { + gmsResults.add( + new EntityResponse() + .setUrn(urn) + .setAspects(new EnvelopedAspectMap()) + .setEntityName(urn.getEntityType())); + } + } + + return gmsResults.stream() .map( - schemaFieldEntity -> - DataFetcherResult.newResult().data(schemaFieldEntity).build()) + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(SchemaFieldMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to load schemaField entity", e); } } - - private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { - try { - SchemaFieldEntity result = new SchemaFieldEntity(); - result.setUrn(urn.toString()); - result.setType(EntityType.SCHEMA_FIELD); - result.setFieldPath(urn.getEntityKey().get(1)); - Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(parentUrn)); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to load schemaField entity", e); - } - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java new file mode 100644 index 00000000000000..dc1ff7ca329714 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import 
java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StructuredPropertiesMapper { + + public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); + + public static com.linkedin.datahub.graphql.generated.StructuredProperties map( + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + return INSTANCE.apply(context, structuredProperties); + } + + public com.linkedin.datahub.graphql.generated.StructuredProperties apply( + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + com.linkedin.datahub.graphql.generated.StructuredProperties result = + new com.linkedin.datahub.graphql.generated.StructuredProperties(); + result.setProperties( + structuredProperties.getProperties().stream() + .map(p -> mapStructuredProperty(context, p)) + .collect(Collectors.toList())); + return result; + } + + private StructuredPropertiesEntry mapStructuredProperty( + @Nullable QueryContext context, StructuredPropertyValueAssignment valueAssignment) { + StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); + entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); + final List values = new ArrayList<>(); + final List entities = new ArrayList<>(); + valueAssignment + .getValues() + .forEach( + value -> { + if (value.isString()) { + this.mapStringValue(context, value.getString(), values, entities); + } else if (value.isDouble()) { + values.add(new NumberValue(value.getDouble())); + } + }); + entry.setValues(values); + entry.setValueEntities(entities); + return entry; + } + + private StructuredPropertyEntity createStructuredPropertyEntity( + StructuredPropertyValueAssignment assignment) { + StructuredPropertyEntity entity = new StructuredPropertyEntity(); + 
entity.setUrn(assignment.getPropertyUrn().toString()); + entity.setType(EntityType.STRUCTURED_PROPERTY); + return entity; + } + + private static void mapStringValue( + @Nullable QueryContext context, + String stringValue, + List values, + List entities) { + try { + final Urn urnValue = Urn.createFromString(stringValue); + entities.add(UrnToEntityMapper.map(context, urnValue)); + } catch (Exception e) { + log.debug("String value is not an urn for this structured property entry"); + } + values.add(new StringValue(stringValue)); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java new file mode 100644 index 00000000000000..b3abab5ed3d367 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -0,0 +1,128 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AllowedValue; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyCardinality; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.TypeQualifier; +import 
com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.PropertyValueArray; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class StructuredPropertyMapper + implements ModelMapper { + + private static final String ALLOWED_TYPES = "allowedTypes"; + + public static final StructuredPropertyMapper INSTANCE = new StructuredPropertyMapper(); + + public static StructuredPropertyEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public StructuredPropertyEntity apply( + @Nullable QueryContext queryContext, @Nonnull final EntityResponse entityResponse) { + final StructuredPropertyEntity result = new StructuredPropertyEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.STRUCTURED_PROPERTY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, (this::mapStructuredPropertyDefinition)); + return mappingHelper.getResult(); + } + + private void mapStructuredPropertyDefinition( + @Nonnull StructuredPropertyEntity extendedProperty, @Nonnull DataMap dataMap) { + com.linkedin.structured.StructuredPropertyDefinition gmsDefinition = + new com.linkedin.structured.StructuredPropertyDefinition(dataMap); + StructuredPropertyDefinition definition = new StructuredPropertyDefinition(); + definition.setQualifiedName(gmsDefinition.getQualifiedName()); + definition.setCardinality( + PropertyCardinality.valueOf(gmsDefinition.getCardinality().toString())); + 
definition.setValueType(createDataTypeEntity(gmsDefinition.getValueType())); + if (gmsDefinition.hasDisplayName()) { + definition.setDisplayName(gmsDefinition.getDisplayName()); + } + if (gmsDefinition.getDescription() != null) { + definition.setDescription(gmsDefinition.getDescription()); + } + if (gmsDefinition.hasAllowedValues()) { + definition.setAllowedValues(mapAllowedValues(gmsDefinition.getAllowedValues())); + } + if (gmsDefinition.hasTypeQualifier()) { + definition.setTypeQualifier(mapTypeQualifier(gmsDefinition.getTypeQualifier())); + } + definition.setEntityTypes( + gmsDefinition.getEntityTypes().stream() + .map(this::createEntityTypeEntity) + .collect(Collectors.toList())); + extendedProperty.setDefinition(definition); + } + + private List mapAllowedValues(@Nonnull PropertyValueArray gmsValues) { + List allowedValues = new ArrayList<>(); + gmsValues.forEach( + value -> { + final AllowedValue allowedValue = new AllowedValue(); + if (value.getValue().isString()) { + allowedValue.setValue(new StringValue(value.getValue().getString())); + } else if (value.getValue().isDouble()) { + allowedValue.setValue(new NumberValue(value.getValue().getDouble())); + } + if (value.getDescription() != null) { + allowedValue.setDescription(value.getDescription()); + } + allowedValues.add(allowedValue); + }); + return allowedValues; + } + + private DataTypeEntity createDataTypeEntity(final Urn dataTypeUrn) { + final DataTypeEntity dataType = new DataTypeEntity(); + dataType.setUrn(dataTypeUrn.toString()); + dataType.setType(EntityType.DATA_TYPE); + return dataType; + } + + private TypeQualifier mapTypeQualifier(final StringArrayMap gmsTypeQualifier) { + final TypeQualifier typeQualifier = new TypeQualifier(); + List allowedTypes = gmsTypeQualifier.get(ALLOWED_TYPES); + if (allowedTypes != null) { + typeQualifier.setAllowedTypes( + allowedTypes.stream().map(this::createEntityTypeEntity).collect(Collectors.toList())); + } + return typeQualifier; + } + + private 
EntityTypeEntity createEntityTypeEntity(final Urn entityTypeUrn) { + return createEntityTypeEntity(entityTypeUrn.toString()); + } + + private EntityTypeEntity createEntityTypeEntity(final String entityTypeUrnStr) { + final EntityTypeEntity entityType = new EntityTypeEntity(); + entityType.setUrn(entityTypeUrnStr); + entityType.setType(EntityType.ENTITY_TYPE); + return entityType; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java new file mode 100644 index 00000000000000..9d50b7d54e4ff8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -0,0 +1,79 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class StructuredPropertyType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME); + private final 
EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.STRUCTURED_PROPERTY; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return StructuredPropertyEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List extendedPropertyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + STRUCTURED_PROPERTY_ENTITY_NAME, + new HashSet<>(extendedPropertyUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : extendedPropertyUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(StructuredPropertyMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index c56833cc817eb7..8760e62c8206f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -28,7 +28,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; @@ -89,7 +88,7 @@ public List> batchLoad( 
_entityClient.batchGetV2( TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : tagUrns) { gmsResults.add(tagMap.getOrDefault(urn, null)); } @@ -98,7 +97,9 @@ public List> batchLoad( gmsTag -> gmsTag == null ? null - : DataFetcherResult.newResult().data(TagMapper.map(gmsTag)).build()) + : DataFetcherResult.newResult() + .data(TagMapper.map(context, gmsTag)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tags", e); @@ -116,14 +117,13 @@ public SearchResults search( final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "tag", query, facetFilters, start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -135,8 +135,8 @@ public AutoCompleteResults autoComplete( @Nonnull QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + _entityClient.autoComplete(context.getOperationContext(), "tag", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -144,9 +144,9 @@ public Tag update( @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = TagUpdateInputMapper.map(input, actor); + final CorpuserUrn actor = 
CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + TagUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); @@ -165,7 +165,7 @@ private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryConte final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.TAG_PRIVILEGES.getResourceType(), update.getUrn(), orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index 72665535e59808..cadeef99410345 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -3,37 +3,49 @@ import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Tag; +import java.util.Optional; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GlobalTagsMapper { public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(standardTags, entityUrn); + @Nullable final QueryContext context, + @Nonnull final GlobalTags standardTags, + @Nonnull final Urn entityUrn) { + return 
INSTANCE.apply(context, standardTags, entityUrn); } public com.linkedin.datahub.graphql.generated.GlobalTags apply( - @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final GlobalTags input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); result.setTags( input.getTags().stream() - .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .map(tag -> mapTagAssociation(context, tag, entityUrn)) + .filter(Optional::isPresent) + .map(Optional::get) .collect(Collectors.toList())); return result; } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + private static Optional mapTagAssociation( + @Nullable final QueryContext context, + @Nonnull final TagAssociation input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); final Tag resultTag = new Tag(); resultTag.setUrn(input.getTag().toString()); result.setTag(resultTag); result.setAssociatedUrn(entityUrn.toString()); - return result; + return Optional.of(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 3792a423760046..cb024fd6953f25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -2,21 +2,26 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.TagUrn; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.net.URISyntaxException; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class TagAssociationUpdateMapper implements ModelMapper { public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); + public static TagAssociation map( + @Nullable final QueryContext context, + @Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(context, tagAssociationUpdate); } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + public TagAssociation apply( + @Nullable final QueryContext context, final TagAssociationUpdate tagAssociationUpdate) { final TagAssociation output = new TagAssociation(); try { output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index d6ce24582678d8..d7971d1788c037 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -7,6 +7,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; @@ -17,6 +18,7 @@ import com.linkedin.metadata.key.TagKey; import com.linkedin.tag.TagProperties; 
import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -27,12 +29,14 @@ public class TagMapper implements ModelMapper { public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Tag map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { + public Tag apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Tag result = new Tag(); Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityResponse.getUrn().toString()); @@ -43,11 +47,12 @@ public Tag apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, TagMapper::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, TagMapper::mapTagProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, - (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + (tag, dataMap) -> + tag.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); if (result.getProperties() != null && result.getProperties().getName() == null) { result.getProperties().setName(legacyName); @@ -56,12 +61,12 @@ public Tag apply(@Nonnull final EntityResponse entityResponse) { return mappingHelper.getResult(); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + private static void 
mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { TagKey tagKey = new TagKey(dataMap); tag.setName(tagKey.getName()); } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + private static void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { final TagProperties properties = new TagProperties(dataMap); final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = new com.linkedin.datahub.graphql.generated.TagProperties.Builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 316994881ccfec..7e6b7052d683d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -13,6 +13,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.TagUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; @@ -21,6 +22,7 @@ import java.util.ArrayList; import java.util.Collection; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class TagUpdateInputMapper implements InputModelMapper, Urn> { @@ -28,13 +30,17 @@ public class TagUpdateInputMapper public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection map( - @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { - return INSTANCE.apply(tagUpdate, actor); + @Nullable final QueryContext context, + @Nonnull final TagUpdateInput tagUpdate, + @Nonnull final Urn 
actor) { + return INSTANCE.apply(context, tagUpdate, actor); } @Override public Collection apply( - @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final TagUpdateInput tagUpdate, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index eefcc356c22a3c..8608dde75628c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -57,7 +57,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index e4e67c86f1ae6b..1bfeeaeea7c36f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.FieldUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class FieldUsageCountsMapper implements ModelMapper { @@ -10,12 +12,15 @@ 
public class FieldUsageCountsMapper public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); public static FieldUsageCounts map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { - return INSTANCE.apply(usageCounts); + return INSTANCE.apply(context, usageCounts); } @Override - public FieldUsageCounts apply(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public FieldUsageCounts apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { FieldUsageCounts result = new FieldUsageCounts(); result.setCount(usageCounts.getCount()); result.setFieldName(usageCounts.getFieldName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 3449c6782a46ba..32ba8f5b80325a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregation; import com.linkedin.datahub.graphql.generated.WindowDuration; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageAggregationMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UsageAggregationMapper public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); public static UsageAggregation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { - return INSTANCE.apply(pdlUsageAggregation); + return 
INSTANCE.apply(context, pdlUsageAggregation); } @Override public UsageAggregation apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); result.setBucket(pdlUsageAggregation.getBucket()); @@ -28,7 +32,8 @@ public UsageAggregation apply( result.setResource(pdlUsageAggregation.getResource().toString()); } if (pdlUsageAggregation.hasMetrics()) { - result.setMetrics(UsageAggregationMetricsMapper.map(pdlUsageAggregation.getMetrics())); + result.setMetrics( + UsageAggregationMetricsMapper.map(context, pdlUsageAggregation.getMetrics())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index ff9f6fd5c48551..47411d65c73290 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregationMetrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageAggregationMetricsMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UsageAggregationMetricsMapper public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); public static UsageAggregationMetrics map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { - return INSTANCE.apply(usageAggregationMetrics); + return 
INSTANCE.apply(context, usageAggregationMetrics); } @Override public UsageAggregationMetrics apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); @@ -25,13 +29,13 @@ public UsageAggregationMetrics apply( if (usageAggregationMetrics.hasFields()) { result.setFields( usageAggregationMetrics.getFields().stream() - .map(FieldUsageCountsMapper::map) + .map(f -> FieldUsageCountsMapper.map(context, f)) .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { result.setUsers( usageAggregationMetrics.getUsers().stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .map(aggregation -> UserUsageCountsMapper.map(context, aggregation)) .collect(Collectors.toList())); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index 63fe051b7ede96..c40126ca325515 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageQueryResultAggregationMapper implements ModelMapper< @@ -13,12 +15,14 @@ public class UsageQueryResultAggregationMapper new UsageQueryResultAggregationMapper(); public static 
UsageQueryResultAggregations map( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + return INSTANCE.apply(context, pdlUsageResultAggregations); } @Override public UsageQueryResultAggregations apply( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); @@ -26,13 +30,13 @@ public UsageQueryResultAggregations apply( if (pdlUsageResultAggregations.hasFields()) { result.setFields( pdlUsageResultAggregations.getFields().stream() - .map(FieldUsageCountsMapper::map) + .map(f -> FieldUsageCountsMapper.map(context, f)) .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { result.setUsers( pdlUsageResultAggregations.getUsers().stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .map(aggregation -> UserUsageCountsMapper.map(context, aggregation)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index 444605cd99377a..eef476959c5fec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,31 +1,41 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResult; +import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageQueryResultMapper implements ModelMapper { + public static final UsageQueryResult EMPTY = + new UsageQueryResult(List.of(), new UsageQueryResultAggregations(0, List.of(), List.of(), 0)); + public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); public static UsageQueryResult map( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { - return INSTANCE.apply(pdlUsageResult); + return INSTANCE.apply(context, pdlUsageResult); } @Override - public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public UsageQueryResult apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { result.setAggregations( - UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + UsageQueryResultAggregationMapper.map(context, pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { result.setBuckets( pdlUsageResult.getBuckets().stream() - .map(bucket -> UsageAggregationMapper.map(bucket)) + .map(bucket -> UsageAggregationMapper.map(context, bucket)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index 014003dd865540..783d44d4863689 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,9 +1,11 @@ 
package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UserUsageCountsMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UserUsageCountsMapper public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); public static UserUsageCounts map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + return INSTANCE.apply(context, pdlUsageResultAggregations); } @Override public UserUsageCounts apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index 8ea06f46d51339..be27f9b0f3c011 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; import com.linkedin.datahub.graphql.generated.DataHubViewFilter; @@ -11,8 +12,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilter; import 
com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; @@ -32,12 +33,14 @@ public class DataHubViewMapper implements ModelMapper> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(DataHubViewMapper.map(gmsResult)) + .data(DataHubViewMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index 4b837605d4e318..677ad8afbaca31 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -13,6 +13,10 @@ public DateTime getStartOfNextWeek() { return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); } + public DateTime getStartOfThisMonth() { + return setTimeToZero(getNow().withDayOfMonth(1)); + } + public DateTime getStartOfNextMonth() { return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); } diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 075a3b0fac43bc..39cff0f5114bfa 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -212,6 +212,16 @@ type VisualConfig { """ faviconUrl: String + """ + Custom app title to show in the browser tab + """ + appTitle: String + + """ + Boolean flag disabling viewing 
the Business Glossary page for users without the 'Manage Glossaries' privilege + """ + hideGlossary: Boolean + """ Configuration for the queries tab """ @@ -437,10 +447,19 @@ type FeatureFlagsConfig { """ showBrowseV2: Boolean! + """ + Whether browse v2 is platform mode, which means that platforms are displayed instead of entity types at the root. + """ + platformBrowseV2: Boolean! + """ Whether we should show CTAs in the UI related to moving to Managed DataHub by Acryl. """ showAcrylInfo: Boolean! + """ + Whether ERModelRelationship Tables Feature should be shown. + """ + erModelRelationshipFeatureEnabled: Boolean! """ Whether we should show AccessManagement tab in the datahub UI. diff --git a/datahub-graphql-core/src/main/resources/auth.graphql b/datahub-graphql-core/src/main/resources/auth.graphql index b76aa132c219c9..83c63b6be18b87 100644 --- a/datahub-graphql-core/src/main/resources/auth.graphql +++ b/datahub-graphql-core/src/main/resources/auth.graphql @@ -11,6 +11,12 @@ extend type Query { List access tokens stored in DataHub. """ listAccessTokens(input: ListAccessTokenInput!): ListAccessTokenResult! + + """ + Fetches the metadata of an access token. + This is useful to debug when you have a raw token but don't know the actor. + """ + getAccessTokenMetadata(token: String!): AccessTokenMetadata! } extend type Mutation { @@ -268,4 +274,9 @@ type EntityPrivileges { Whether or not a user can update the Queries for the entity (e.g. dataset) """ canEditQueries: Boolean + + """ + Whether or not a user can update the properties for the entity (e.g. 
dataset) + """ + canEditProperties: Boolean } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index feb344154d11e4..b750d206261018 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -73,6 +73,11 @@ type Query { """ role(urn: String!): Role + """ + Fetch a ERModelRelationship by primary key (urn) + """ + erModelRelationship(urn: String!): ERModelRelationship + """ Fetch a Glossary Term by primary key (urn) """ @@ -233,6 +238,140 @@ type Query { dataPlatformInstance(urn: String!): DataPlatformInstance } +""" +An ERModelRelationship is a high-level abstraction that dictates what datasets fields are erModelRelationshiped. +""" +type ERModelRelationship implements EntityWithRelationships & Entity { + """ + The primary key of the role + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Unique id for the erModelRelationship + """ + id: String! 
+ + """ + An additional set of read only properties + """ + properties: ERModelRelationshipProperties + + """ + An additional set of of read write properties + """ + editableProperties: ERModelRelationshipEditableProperties + + """ + References to internal resources related to the dataset + """ + institutionalMemory: InstitutionalMemory + + """ + Ownership metadata of the dataset + """ + ownership: Ownership + + """ + Status of the Dataset + """ + status: Status + + """ + Tags used for searching dataset + """ + tags: GlobalTags + + """ + The structured glossary terms associated with the dataset + """ + glossaryTerms: GlossaryTerms + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges + + """ + No-op required for the model + """ + lineage(input: LineageInput!): EntityLineageResult +} + +""" +Additional properties about a ERModelRelationship +""" +type ERModelRelationshipEditableProperties { + + """ + Documentation of the ERModelRelationship + """ + description: String + """ + Display name of the ERModelRelationship + """ + name: String +} + +""" +Additional properties about a ERModelRelationship +""" +type ERModelRelationshipProperties { + + """ + The name of the ERModelRelationship used in display + """ + name: String! + """ + The urn of source + """ + source: Dataset! + + """ + The urn of destination + """ + destination: Dataset! + + """ + The relationFieldMappings + """ + relationshipFieldMappings: [RelationshipFieldMapping!] 
+ + """ + Created timestamp millis associated with the ERModelRelationship + """ + createdTime: Long + + """ + Created actor urn associated with the ERModelRelationship + """ + createdActor: Entity +} + +""" +ERModelRelationship FieldMap +""" +type RelationshipFieldMapping { + """ + left field + """ + sourceField: String! + """ + bfield + """ + destinationField: String! +} + """ Root type used for updating DataHub Metadata Coming soon createEntity, addOwner, removeOwner mutations @@ -467,6 +606,31 @@ type Mutation { """ unsetDomain(entityUrn: String!): Boolean + """ + Create a ERModelRelationship + """ + createERModelRelationship( + "Input required to create a new ERModelRelationship" + input: ERModelRelationshipUpdateInput!): ERModelRelationship + + """ + Update a ERModelRelationship + """ + updateERModelRelationship( + "The urn of the ERModelRelationship to update" + urn: String!, + "Input required to updat an existing DataHub View" + input: ERModelRelationshipUpdateInput!): Boolean + + """ + Delete a ERModelRelationship + """ + deleteERModelRelationship( + "The urn of the ERModelRelationship to delete" + urn: String!): Boolean + + + """ Sets the Deprecation status for a Metadata Entity. Requires the Edit Deprecation status privilege for an entity. """ @@ -582,6 +746,11 @@ type Mutation { """ createPost(input: CreatePostInput!): Boolean + """ + Update or edit a post + """ + updatePost(input: UpdatePostInput!): Boolean + """ Delete a post """ @@ -695,6 +864,31 @@ type Mutation { deleteOwnershipType( "Urn of the Custom Ownership Type to remove." urn: String!, deleteReferences: Boolean): Boolean + + """ + Submit a response to a prompt from a form collecting metadata on different entities. + Provide the urn of the entity you're submitting a form response as well as the required input. + """ + submitFormPrompt(urn: String!, input: SubmitFormPromptInput!): Boolean + + """ + Assign a form to different entities. 
This will be a patch by adding this form to the list + of forms on an entity. + """ + batchAssignForm(input: BatchAssignFormInput!): Boolean + + """ + Creates a filter for a form to apply it to certain entities. Entities that match this filter will have + a given form applied to them. + This feature is ONLY supported in Acryl DataHub. + """ + createDynamicFormAssignment(input: CreateDynamicFormAssignmentInput!): Boolean + + """ + Verifies a form on an entity when all of the required questions on the form are complete and the form + is of type VERIFICATION. + """ + verifyForm(input: VerifyFormInput!): Boolean } """ @@ -756,6 +950,11 @@ enum EntityType { """ DATA_PLATFORM + """ + The ERModelRelationship Entity + """ + ER_MODEL_RELATIONSHIP + """ The Dashboard Entity """ @@ -901,10 +1100,45 @@ enum EntityType { """ CUSTOM_OWNERSHIP_TYPE + """ + A DataHub incident - SaaS only + """ + INCIDENT + """" A Role from an organisation """ ROLE + + """" + An structured property on entities + """ + STRUCTURED_PROPERTY + + """" + A form entity on entities + """ + FORM + + """" + A data type registered to DataHub + """ + DATA_TYPE + + """" + A type of entity registered to DataHub + """ + ENTITY_TYPE + + """" + A type of entity that is restricted to the user + """ + RESTRICTED + + """ + Another entity type - refer to a provided entity type urn. 
+ """ + OTHER } """ @@ -1279,6 +1513,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { """ domain: DomainAssociation + """ + The forms associated with the Dataset + """ + forms: Forms + """ The Roles and the properties to access the dataset """ @@ -1309,7 +1548,7 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { """ Assertions associated with the Dataset """ - assertions(start: Int, count: Int): EntityAssertionsResult + assertions(start: Int, count: Int, includeSoftDeleted: Boolean): EntityAssertionsResult """ Edges extending from this entity @@ -1421,6 +1660,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Structured properties about this Dataset + """ + structuredProperties: StructuredProperties } type RoleAssociation { @@ -1491,7 +1735,7 @@ type RoleUser { type RoleProperties { """ - Name of the Role in an organisation + Name of the Role in an organisation """ name: String! @@ -1514,6 +1758,8 @@ type RoleProperties { type FineGrainedLineage { upstreams: [SchemaFieldRef!] downstreams: [SchemaFieldRef!] + query: String + transformOperation: String } """ @@ -1524,6 +1770,7 @@ type SiblingProperties { If this entity is the primary sibling among the sibling set """ isPrimary: Boolean + """ The sibling entities """ @@ -1631,7 +1878,7 @@ type VersionedDataset implements Entity { domain: DomainAssociation """ - Experimental! The resolved health status of the Dataset + Experimental! The resolved health status of the asset """ health: [Health!] @@ -1697,6 +1944,12 @@ input AspectParams { Only fetch auto render aspects """ autoRenderOnly: Boolean + + """ + Fetch using aspect names + If absent, returns all aspects matching other inputs + """ + aspectNames: [String!] 
} @@ -1905,6 +2158,22 @@ type GlossaryTerm implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -2042,6 +2311,22 @@ type GlossaryNode implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -2071,6 +2356,11 @@ type GlossaryNodeProperties { Description of the glossary term """ description: String + + """ + Custom properties of the Glossary Node + """ + customProperties: [CustomPropertiesEntry!] } """ @@ -2442,6 +2732,27 @@ type Container implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -2817,12 +3128,32 @@ type SchemaFieldEntity implements Entity { """ parent: Entity! 
+ """ + Structured properties on this schema field + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + """ Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult } +""" +Object containing structured properties for an entity +""" +type StructuredProperties { + """ + Structured properties on this entity + """ + properties: [StructuredPropertiesEntry!] +} + """ Information about an individual field in a Dataset schema """ @@ -2892,6 +3223,16 @@ type SchemaField { Whether the field is part of a partitioning key schema """ isPartitioningKey: Boolean + + """ + For schema fields that have other properties that are not modeled explicitly, represented as a JSON string. + """ + jsonProps: String + + """ + Schema field entity that exist in the database for this schema field + """ + schemaFieldEntity: SchemaFieldEntity } """ @@ -3019,6 +3360,12 @@ type ViewProperties { """ logic: String! + """ + A formatted version of the logic associated with the view. + For dbt, this contains the compiled SQL. + """ + formattedLogic: String + """ The language in which the view logic is written, for example SQL """ @@ -3430,26 +3777,52 @@ type CorpUser implements Entity { """ globalTags: GlobalTags @deprecated + """ + Whether or not this entity exists on DataHub + """ + exists: Boolean + """ Settings that a user can customize through the datahub ui """ settings: CorpUserSettings -} -""" -Settings that a user can customize through the datahub ui -""" -type CorpUserSettings { """ - Settings that control look and feel of the DataHub UI for the user + Experimental API. + For fetching extra aspects that do not have custom UI code yet """ - appearance: CorpUserAppearanceSettings + aspects(input: AspectParams): [RawAspect!] 
""" - Settings related to the DataHub Views feature + Structured properties about this asset """ - views: CorpUserViewsSettings -} + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges +} + +""" +Settings that a user can customize through the datahub ui +""" +type CorpUserSettings { + """ + Settings that control look and feel of the DataHub UI for the user + """ + appearance: CorpUserAppearanceSettings + + """ + Settings related to the DataHub Views feature + """ + views: CorpUserViewsSettings +} """ Settings that control look and feel of the DataHub UI for the user @@ -3794,6 +4167,27 @@ type CorpGroup implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -3878,6 +4272,11 @@ type CorpGroupEditableProperties { Email address for the group """ email: String + + """ + A URL which points to a picture which user wants to set as a profile photo + """ + pictureLink: String } """ @@ -3898,6 +4297,11 @@ input CorpGroupUpdateInput { Email address for the group """ email: String + + """ + A URL which points to a picture which user wants to set as a profile photo + """ + pictureLink: String } """ @@ -3995,6 +4399,12 @@ type Tag implements Entity { Deprecated, use properties.description field instead """ description: String @deprecated + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
} """ @@ -4089,6 +4499,11 @@ type GlossaryTermAssociation { """ term: GlossaryTerm! + """ + The actor who is responsible for the term being added" + """ + actor: CorpUser + """ Reference back to the associated urn for tracking purposes e.g. when sibling nodes are merged together """ @@ -4317,6 +4732,21 @@ input DatasetEditablePropertiesUpdate { description: String! } +""" +Update to writable Dataset fields +""" +input ERModelRelationshipEditablePropertiesUpdate { + """ + Display name of the ERModelRelationship + """ + name: String + + """ + Writable description for ERModelRelationship + """ + description: String! +} + """ Update to writable Chart fields """ @@ -4435,6 +4865,68 @@ input CreateTagInput { description: String } +""" +Input required to create/update a new ERModelRelationship +""" +input ERModelRelationshipUpdateInput { + """ + Details about the ERModelRelationship + """ + properties: ERModelRelationshipPropertiesInput + """ + Update to editable properties + """ + editableProperties: ERModelRelationshipEditablePropertiesUpdate +} + +""" +Details about the ERModelRelationship +""" +input ERModelRelationshipPropertiesInput { + """ + Details about the ERModelRelationship + """ + name: String! + """ + Details about the ERModelRelationship + """ + source: String! + """ + Details about the ERModelRelationship + """ + destination: String! + """ + Details about the ERModelRelationship + """ + relationshipFieldmappings: [RelationshipFieldMappingInput!] 
+ """ + optional flag about the ERModelRelationship is getting create + """ + created: Boolean + """ + optional field to prevent created time while the ERModelRelationship is getting update + """ + createdAt: Long + """ + optional field to prevent create actor while the ERModelRelationship is getting update + """ + createdBy: String +} + +""" +Details about the ERModelRelationship +""" +input RelationshipFieldMappingInput { + """ + Details about the ERModelRelationship + """ + sourceField: String + """ + Details about the ERModelRelationship + """ + destinationField: String +} + """ An update for the ownership information for a Metadata Entity """ @@ -4625,6 +5117,12 @@ type Notebook implements Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4945,6 +5443,27 @@ type Dashboard implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5255,6 +5774,27 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Sub Types that this entity implements """ subTypes: SubTypes + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! 
The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5612,6 +6152,32 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -5812,6 +6378,27 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5884,6 +6471,11 @@ type DataProcessRunEvent implements TimeSeriesAspect { The timestamp associated with the run event in milliseconds """ timestampMillis: Long! + + """ + The duration of the run in milliseconds + """ + durationMillis: Long } """ @@ -6548,10 +7140,10 @@ type PartitionSpec { """ The partition identifier """ - partition: String! + partition: String """ - The optional time window partition information + The optional time window partition information - required if type is TIMESTAMP_FIELD. """ timePartition: TimeWindow } @@ -6577,7 +7169,6 @@ type TimeWindow { durationMillis: Long! 
} - """ An assertion represents a programmatic validation, check, or test performed periodically against another Entity. """ @@ -6622,6 +7213,17 @@ type Assertion implements EntityWithRelationships & Entity { Edges extending from this entity grouped by direction in the lineage graph """ lineage(input: LineageInput!): EntityLineageResult + + """ + Status metadata of the assertion + """ + status: Status + + """ + Experimental API. + For fetching extra aspects that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -6637,6 +7239,11 @@ type AssertionInfo { Dataset-specific assertion information """ datasetAssertion: DatasetAssertionInfo + + """ + An optional human-readable description of the assertion + """ + description: String } """ @@ -7038,10 +7645,29 @@ type AssertionStdParameter { The type of an AssertionStdParameter """ enum AssertionStdParameterType { + """ + A string value + """ STRING + + """ + A numeric value + """ NUMBER + + """ + A list of values. When used, the value should be formatted as a serialized JSON array. + """ LIST + + """ + A set of values. When used, the value should be formatted as a serialized JSON array. + """ SET + + """ + A value of unknown type + """ UNKNOWN } @@ -8358,6 +8984,11 @@ input ListPoliciesInput { Optional search query """ query: String + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [AndFilterInput!] } """ @@ -8728,6 +9359,27 @@ type MLModel implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -8839,6 +9491,27 @@ type MLModelGroup implements EntityWithRelationships & Entity & BrowsableEntity Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLModelGroupProperties { @@ -8963,6 +9636,27 @@ type MLFeature implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLHyperParam { @@ -9132,6 +9826,27 @@ type MLPrimaryKey implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLPrimaryKeyProperties { @@ -9259,6 +9974,27 @@ type MLFeatureTable implements EntityWithRelationships & Entity & BrowsableEntit Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLFeatureTableEditableProperties { @@ -9567,6 +10303,22 @@ enum CostType { ORG_COST_TYPE } + +""" +Audit stamp containing a resolved actor +""" +type ResolvedAuditStamp { + """ + When the audited action took place + """ + time: Long! + + """ + Who performed the audited action + """ + actor: CorpUser +} + type SubTypes { """ The sub-types that this entity implements. e.g. Datasets that are views will implement the "view" subtype @@ -9634,6 +10386,27 @@ type Domain implements Entity { Edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -9881,6 +10654,11 @@ enum HealthStatusType { Assertions status """ ASSERTIONS + + """ + Incidents status + """ + INCIDENTS } """ @@ -10129,6 +10907,12 @@ type DataHubRole implements Entity { The description of the Role """ description: String! + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -10211,6 +10995,26 @@ input CreatePostInput { content: UpdatePostContentInput! } +""" +Input provided when creating a Post +""" +input UpdatePostInput { + """ + The urn of the post to edit or update + """ + urn: String!, + + """ + The type of post + """ + postType: PostType! + + """ + The content of the post + """ + content: UpdatePostContentInput! +} + """ Input provided for filling in a post content """ @@ -10680,6 +11484,11 @@ enum QuerySource { The query was provided manually, e.g. from the UI. """ MANUAL + + """ + The query was extracted by the system, e.g. from a dashboard. + """ + SYSTEM } """ @@ -10730,6 +11539,11 @@ type QueryProperties { An Audit Stamp corresponding to the update of this resource """ lastModified: AuditStamp! + + """ + The asset that this query originated from, e.g. a View, a dbt Model, etc. + """ + origin: Entity } """ @@ -10770,6 +11584,11 @@ type QueryEntity implements Entity { Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Platform from which the Query was detected + """ + platform: DataPlatform } """ @@ -10985,6 +11804,27 @@ type DataProduct implements Entity { Tags used for searching Data Product """ tags: GlobalTags + + """ + Experimental API. 
+ For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -11055,6 +11895,10 @@ input CreateDataProductInput { The primary key of the Domain """ domainUrn: String! + """ + An optional id for the new data product + """ + id: String } """ @@ -11236,3 +12080,120 @@ input UpdateOwnershipTypeInput { """ description: String } + +""" +Describes a generic filter on a dataset +""" +type DatasetFilter { + """ + Type of partition + """ + type: DatasetFilterType! + + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +Type of partition +""" +enum DatasetFilterType { + """ + Use a SQL string to apply the filter + """ + SQL +} + + +""" +Input required to create or update a DatasetFilter +""" +input DatasetFilterInput { + """ + Type of partition + """ + type: DatasetFilterType! + + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +An entity type registered in DataHub +""" +type EntityTypeEntity implements Entity { + """ + A primary key associated with the Query + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: EntityTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual entity type +""" +type EntityTypeInfo { + """ + The standard entity type + """ + type: EntityType! + + """ + The fully qualified name of the entity type. This includes its namespace + """ + qualifiedName: String! 
+ + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String +} + +""" +A restricted entity that the user does not have full permissions to view. +This entity type does not relate to an entity type in the database. +""" +type Restricted implements Entity & EntityWithRelationships { + """ + The primary key of the restricted entity + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Edges extending from this entity grouped by direction in the lineage graph + """ + lineage(input: LineageInput!): EntityLineageResult +} diff --git a/datahub-graphql-core/src/main/resources/forms.graphql b/datahub-graphql-core/src/main/resources/forms.graphql new file mode 100644 index 00000000000000..f5e5fa74e3dc97 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/forms.graphql @@ -0,0 +1,400 @@ +extend type Mutation { + """ + Remove a form from a given list of entities. + """ + batchRemoveForm(input: BatchRemoveFormInput!): Boolean! +} + +""" +Requirements forms that are assigned to an entity. +""" +type Forms { + """ + Forms that are still incomplete. + """ + incompleteForms: [FormAssociation!]! + + """ + Forms that have been completed. + """ + completedForms: [FormAssociation!]! + + """ + Verifications that have been applied to the entity via completed forms. + """ + verifications: [FormVerificationAssociation!]! +} + +type FormAssociation { + """ + The form related to the associated urn + """ + form: Form! + + """ + Reference back to the urn with the form on it for tracking purposes e.g. when sibling nodes are merged together + """ + associatedUrn: String! + + """ + The prompt that still need to be completed for this form + """ + incompletePrompts: [FormPromptAssociation!] 
+ + """ + The prompt that are already completed for this form + """ + completedPrompts: [FormPromptAssociation!] +} + +""" +Verification object that has been applied to the entity via a completed form. +""" +type FormVerificationAssociation { + """ + The form related to the associated urn + """ + form: Form! + + """ + When this verification was applied to this entity + """ + lastModified: ResolvedAuditStamp +} + +""" +A form that helps with filling out metadata on an entity +""" +type FormPromptAssociation { + """ + The unique id of the form prompt + """ + id: String! + + """ + When and by whom this form prompt has last been modified + """ + lastModified: ResolvedAuditStamp! + + """ + Optional information about the field-level prompt associations. + """ + fieldAssociations: FormPromptFieldAssociations +} + +""" +Information about the field-level prompt associations. +""" +type FormPromptFieldAssociations { + """ + If this form prompt is for fields, this will contain a list of completed associations per field + """ + completedFieldPrompts: [FieldFormPromptAssociation!] + + """ + If this form prompt is for fields, this will contain a list of incomlete associations per field + """ + incompleteFieldPrompts: [FieldFormPromptAssociation!] +} + +""" +An association for field-level form prompts +""" +type FieldFormPromptAssociation { + """ + The schema field path + """ + fieldPath: String! + + """ + When and by whom this form field-level prompt has last been modified + """ + lastModified: ResolvedAuditStamp! +} + +""" +A form that helps with filling out metadata on an entity +""" +type Form implements Entity { + """ + A primary key associated with the Form + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Information about this form + """ + info: FormInfo! 
+ + """ + Ownership metadata of the form + """ + ownership: Ownership + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The type of a form. This is optional on a form entity +""" +enum FormType { + """ + This form is used for "verifying" entities as a state for governance and compliance + """ + VERIFICATION + + """ + This form is used to help with filling out metadata on entities + """ + COMPLETION +} + +""" +Properties about an individual Form +""" +type FormInfo { + """ + The name of this form + """ + name: String! + + """ + The description of this form + """ + description: String + + """ + The type of this form + """ + type: FormType! + + """ + The prompt for this form + """ + prompts: [FormPrompt!]! + + """ + The actors that are assigned to complete the forms for the associated entities. + """ + actors: FormActorAssignment! +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +type FormPrompt { + """ + The ID of this prompt. This will be globally unique. + """ + id: String! + + """ + The title of this prompt + """ + title: String! + + """ + The urn of the parent form that this prompt is part of + """ + formUrn: String! + + """ + The description of this prompt + """ + description: String + + """ + The description of this prompt + """ + type: FormPromptType! + + """ + Whether the prompt is required for the form to be considered completed. + """ + required: Boolean! + + """ + The params for this prompt if type is STRUCTURED_PROPERTY + """ + structuredPropertyParams: StructuredPropertyParams +} + +""" +Enum of all form prompt types +""" +enum FormPromptType { + """ + A structured property form prompt type. + """ + STRUCTURED_PROPERTY + """ + A schema field-level structured property form prompt type. 
+ """ + FIELDS_STRUCTURED_PROPERTY +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +type StructuredPropertyParams { + """ + The structured property required for the prompt on this entity + """ + structuredProperty: StructuredPropertyEntity! +} + +""" +Input for responding to a singular prompt in a form +""" +input SubmitFormPromptInput { + """ + The unique ID of the prompt this input is responding to + """ + promptId: String! + + """ + The urn of the form that this prompt is a part of + """ + formUrn: String! + + """ + The type of prompt that this input is responding to + """ + type: FormPromptType! + + """ + The fieldPath on a schema field that this prompt submission is association with. + This should be provided when the prompt is type FIELDS_STRUCTURED_PROPERTY + """ + fieldPath: String + + """ + The structured property required for the prompt on this entity + """ + structuredPropertyParams: StructuredPropertyInputParams +} + + +""" +Input for collecting structured property values to apply to entities +""" +input PropertyValueInput { + """ + The string value for this structured property + """ + stringValue: String + + """ + The number value for this structured property + """ + numberValue: Float +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +input StructuredPropertyInputParams { + """ + The urn of the structured property being applied to an entity + """ + structuredPropertyUrn: String! + + """ + The list of values you want to apply on this structured property to an entity + """ + values: [PropertyValueInput!]! +} + +""" +Input for batch assigning a form to different entities +""" +input BatchAssignFormInput { + """ + The urn of the form being assigned to entities + """ + formUrn: String! + + """ + The entities that this form is being assigned to + """ + entityUrns: [String!]! 
+} + +""" +Input for batch assigning a form to different entities +""" +input CreateDynamicFormAssignmentInput { + """ + The urn of the form being assigned to entities that match some criteria + """ + formUrn: String! + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters). + Entities that match this filter will have this form applied to them. + Currently, we only support a set of fields to filter on and they are: + (1) platform (2) subType (3) container (4) _entityType (5) domain + """ + orFilters: [AndFilterInput!]! +} + +type FormActorAssignment { + """ + Whether the form should be completed by owners of the assets which the form is applied to. + """ + owners: Boolean! + + """ + Urns of the users that the form is assigned to. If null, then no users are specifically targeted. + """ + users: [CorpUser!] + + """ + Groups that the form is assigned to. If null, then no groups are specifically targeted. + """ + groups: [CorpGroup!] + + """ + Whether or not the current actor is universally assigned to this form, either by user or by group. + Note that this does not take into account entity ownership based assignment. + """ + isAssignedToMe: Boolean! +} + +""" +Input for verifying forms on entities +""" +input VerifyFormInput { + """ + The urn of the form being verified on an entity + """ + formUrn: String! + + """ + The urn of the entity that is having a form verified on it + """ + entityUrn: String! +} + +""" +Input for batch removing a form from different entities +""" +input BatchRemoveFormInput { + """ + The urn of the form being removed from entities + """ + formUrn: String! + + """ + The entities that this form is being removed from + """ + entityUrns: [String!]! 
+} diff --git a/datahub-graphql-core/src/main/resources/incident.graphql b/datahub-graphql-core/src/main/resources/incident.graphql new file mode 100644 index 00000000000000..c3f4f35be608dd --- /dev/null +++ b/datahub-graphql-core/src/main/resources/incident.graphql @@ -0,0 +1,340 @@ +extend type Mutation { + """ + Create a new incident for a resource (asset) + """ + raiseIncident( + """ + Input required to create a new incident + """ + input: RaiseIncidentInput!): String + + """ + Update an existing incident for a resource (asset) + """ + updateIncidentStatus( + """ + The urn for an existing incident + """ + urn: String! + + """ + Input required to update the state of an existing incident + """ + input: UpdateIncidentStatusInput!): Boolean +} + +""" +A list of Incidents Associated with an Entity +""" +type EntityIncidentsResult { + """ + The starting offset of the result set returned + """ + start: Int! + + """ + The number of assertions in the returned result set + """ + count: Int! + + """ + The total number of assertions in the result set + """ + total: Int! + + """ + The incidents themselves + """ + incidents: [Incident!]! +} + +""" +An incident represents an active issue on a data asset. +""" +type Incident implements Entity { + """ + The primary key of the Incident + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + The type of incident + """ + incidentType: IncidentType! + + """ + A custom type of incident. Present only if type is 'CUSTOM' + """ + customType: String + + """ + An optional title associated with the incident + """ + title: String + + """ + An optional description associated with the incident + """ + description: String + + """ + The status of an incident + """ + status: IncidentStatus! + + """ + Optional priority of the incident. Lower value indicates higher priority. + """ + priority: Int + + """ + The entity that the incident is associated with. + """ + entity: Entity! 
+ + """ + The source of the incident, i.e. how it was generated + """ + source: IncidentSource + + """ + The time at which the incident was initially created + """ + created: AuditStamp! + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The state of an incident. +""" +enum IncidentState { + """ + The incident is ongoing, or active. + """ + ACTIVE + """ + The incident is resolved. + """ + RESOLVED +} + +""" +A specific type of incident +""" +enum IncidentType { + """ + An operational incident, e.g. failure to materialize a dataset, or failure to execute a task / pipeline. + """ + OPERATIONAL + + """ + A custom type of incident + """ + CUSTOM +} + + +""" +Details about the status of an asset incident +""" +type IncidentStatus { + """ + The state of the incident + """ + state: IncidentState! + """ + An optional message associated with the status + """ + message: String + """ + The time that the status last changed + """ + lastUpdated: AuditStamp! +} + +""" +The source type of an incident, implying how it was created. +""" +enum IncidentSourceType { + """ + The incident was created manually, from either the API or the UI. + """ + MANUAL +} + +""" +Details about the source of an incident, e.g. how it was created. +""" +type IncidentSource { + """ + The type of the incident source + """ + type: IncidentSourceType! + + """ + The source of the incident. If the source type is ASSERTION_FAILURE, this will have the assertion that generated the incident. + """ + source: Entity +} + +""" +Input required to create a new incident in the 'Active' state. +""" +input RaiseIncidentInput { + """ + The type of incident + """ + type: IncidentType! + """ + A custom type of incident. 
Present only if type is 'CUSTOM' + """ + customType: String + """ + An optional title associated with the incident + """ + title: String + """ + An optional description associated with the incident + """ + description: String + """ + The resource (dataset, dashboard, chart, dataFlow, etc) that the incident is associated with. + """ + resourceUrn: String! + """ + The source of the incident, i.e. how it was generated + """ + source: IncidentSourceInput + """ + An optional priority for the incident. Lower value indicates a higher priority. + """ + priority: Int +} + +""" +Input required to create an incident source +""" +input IncidentSourceInput { + """ + The type of the incident source + """ + type: IncidentSourceType! +} + +""" +Input required to update status of an existing incident +""" +input UpdateIncidentStatusInput { + """ + The new state of the incident + """ + state: IncidentState! + """ + An optional message associated with the new state + """ + message: String +} + +extend type Dataset { + """ + Incidents associated with the Dataset + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataJob { + """ + Incidents associated with the DataJob + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataFlow { + """ + Incidents associated with the DataFlow + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. 
+ """ + count: Int): EntityIncidentsResult +} + +extend type Dashboard { + """ + Incidents associated with the Dashboard + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type Chart { + """ + Incidents associated with the Chart + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/ingestion.graphql b/datahub-graphql-core/src/main/resources/ingestion.graphql index 21f9fb2633119b..d65343c0a16d2b 100644 --- a/datahub-graphql-core/src/main/resources/ingestion.graphql +++ b/datahub-graphql-core/src/main/resources/ingestion.graphql @@ -36,6 +36,11 @@ extend type Mutation { """ createSecret(input: CreateSecretInput!): String + """ + Update a Secret + """ + updateSecret(input: UpdateSecretInput!): String + """ Delete a Secret """ @@ -560,6 +565,31 @@ input CreateSecretInput { description: String } +""" +Input arguments for updating a Secret +""" +input UpdateSecretInput { + """ + The primary key of the Secret to update + """ + urn: String! + + """ + The name of the secret for reference in ingestion recipes + """ + name: String! + + """ + The value of the secret, to be encrypted and stored + """ + value: String! 
+ + """ + An optional description for the secret + """ + description: String +} + """ Input arguments for retrieving the plaintext values of a set of secrets """ diff --git a/datahub-graphql-core/src/main/resources/properties.graphql b/datahub-graphql-core/src/main/resources/properties.graphql new file mode 100644 index 00000000000000..3bf0bbefc406d7 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/properties.graphql @@ -0,0 +1,265 @@ +extend type Mutation { + """ + Upsert structured properties onto a given asset + """ + upsertStructuredProperties(input: UpsertStructuredPropertiesInput!): StructuredProperties! +} + +""" +A structured property that can be shared between different entities +""" +type StructuredPropertyEntity implements Entity { + """ + A primary key associated with the structured property + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Definition of this structured property including its name + """ + definition: StructuredPropertyDefinition! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual Query +""" +type StructuredPropertyDefinition { + """ + The fully qualified name of the property. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this structured property + """ + displayName: String + + """ + The description of this property + """ + description: String + + """ + The cardinality of a Structured Property determining whether one or multiple values + can be applied to the entity from this property. + """ + cardinality: PropertyCardinality + + """ + A list of allowed values that the property is allowed to take. + """ + allowedValues: [AllowedValue!] + + """ + The type of this structured property + """ + valueType: DataTypeEntity! 
+ + """ + Allows for type specialization of the valueType to be more specific about which + entity types are allowed, for example. + """ + typeQualifier: TypeQualifier + + """ + Entity types that this structured property can be applied to + """ + entityTypes: [EntityTypeEntity!]! +} + +""" +An entry for an allowed value for a structured property +""" +type AllowedValue { + """ + The allowed value + """ + value: PropertyValue! + + """ + The description of this allowed value + """ + description: String +} + +""" +The cardinality of a Structured Property determining whether one or multiple values +can be applied to the entity from this property. +""" +enum PropertyCardinality { + """ + Only one value of this property can applied to an entity + """ + SINGLE + + """ + Multiple values of this property can applied to an entity + """ + MULTIPLE +} + +""" +Allows for type specialization of the valueType to be more specific about which +entity types are allowed, for example. +""" +type TypeQualifier { + """ + The list of allowed entity types + """ + allowedTypes: [EntityTypeEntity!] +} + +""" +String property value +""" +type StringValue { + """ + The value of a string type property + """ + stringValue: String! +} + +""" +Numeric property value +""" +type NumberValue { + """ + The value of a number type property + """ + numberValue: Float! +} + +""" +The value of a property +""" +union PropertyValue = StringValue | NumberValue + +""" +An entry in an structured properties list represented as a tuple +""" +type StructuredPropertiesEntry { + """ + The key of the map entry + """ + structuredProperty: StructuredPropertyEntity! + + """ + The values of the structured property for this entity + """ + values: [PropertyValue]! 
+ + """ + The optional entities associated with the values if the values are entity urns + """ + valueEntities: [Entity] +} + +""" +Input for upserting structured properties on a given asset +""" +input UpsertStructuredPropertiesInput { + """ + The urn of the asset that we are updating + """ + assetUrn: String! + + """ + The list of structured properties you want to upsert on this asset + """ + structuredPropertyInputParams: [StructuredPropertyInputParams!]! +} + +""" +A data type registered in DataHub +""" +type DataTypeEntity implements Entity { + """ + A primary key associated with the Query + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: DataTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +A well-supported, standard DataHub Data Type. +""" +enum StdDataType { + """ + String data type + """ + STRING + + """ + Number data type + """ + NUMBER + + """ + Urn data type + """ + URN + + """ + Rich text data type. Right now this is markdown only. + """ + RICH_TEXT + + """ + Date data type in format YYYY-MM-DD + """ + DATE + + """ + Any other data type - refer to a provided data type urn. + """ + OTHER +} + +""" +Properties about an individual data type +""" +type DataTypeInfo { + """ + The standard data type + """ + type: StdDataType! + + """ + The fully qualified name of the type. This includes its namespace + """ + qualifiedName: String! 
+ + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String +} diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index e0cde5a2db9f99..13c1ff2e8a7648 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -143,6 +143,62 @@ input SearchFlags { Whether to request for search suggestions on the _entityName virtualized field """ getSuggestions: Boolean + + """ + Additional grouping specifications to apply to the search results + Grouping specifications will control how search results are grouped together + in the response. This is currently being used to group schema fields (columns) + as datasets, and in the future will be used to group other entities as well. + Note: This is an experimental feature and is subject to change. + """ + groupingSpec: GroupingSpec + + """ + Whether to include soft deleted entities + """ + includeSoftDeleted: Boolean + + """ + Whether to include restricted entities + """ + includeRestricted: Boolean +} + +""" +Flags to control lineage behavior +""" +input LineageFlags { + """ + Limits the number of results explored per hop, still gets all edges each time a hop happens + """ + entitiesExploredPerHopLimit: Int + + """ + An optional starting time to filter on + """ + startTimeMillis: Long + """ + An optional ending time to filter on + """ + endTimeMillis: Long + + """ + Map of entity types to platforms to ignore when counting hops during graph walk. Note: this can potentially cause + a large amount of additional hops to occur and should be used with caution. + """ + ignoreAsHops: [EntityTypeToPlatforms!] +} + +input EntityTypeToPlatforms { + """ + Entity type to ignore as hops, if no platform is applied applies to all entities of this type. + """ + entityType: EntityType! 
+ + """ + List of platforms to ignore as hops, empty implies all. Must be a valid platform urn + """ + platforms: [String!] } """ @@ -278,6 +334,7 @@ input ScrollAcrossEntitiesInput { searchFlags: SearchFlags } + """ Input arguments for a search query over the results of a multi-hop graph query """ @@ -325,16 +382,21 @@ input SearchAcrossLineageInput { """ An optional starting time to filter on """ - startTimeMillis: Long + startTimeMillis: Long @deprecated(reason: "Use LineageFlags instead") """ An optional ending time to filter on """ - endTimeMillis: Long + endTimeMillis: Long @deprecated(reason: "Use LineageFlags instead") """ Flags controlling search options """ searchFlags: SearchFlags + + """ + Flags controlling the lineage query + """ + lineageFlags: LineageFlags } """ @@ -395,6 +457,11 @@ input ScrollAcrossLineageInput { Flags controlling search options """ searchFlags: SearchFlags + + """ + Flags controlling the lineage query + """ + lineageFlags: LineageFlags } """ @@ -577,7 +644,7 @@ type ScrollResults { } """ -Results returned by issueing a search across relationships query +Results returned by issuing a search across relationships query """ type SearchAcrossLineageResults { """ @@ -612,7 +679,7 @@ type SearchAcrossLineageResults { } """ -Results returned by issueing a search across relationships query using scroll API +Results returned by issuing a search across relationships query using scroll API """ type ScrollAcrossLineageResults { """ @@ -669,6 +736,17 @@ type SearchAcrossLineageResult { Degree of relationship (number of hops to get to entity) """ degree: Int! + + """ + Degrees of relationship (for entities discoverable at multiple degrees) + """ + degrees: [Int!] + + """ + Marks whether or not this entity was explored further for lineage + """ + explored: Boolean! + } """ @@ -1139,7 +1217,7 @@ type QuickFilter { } """ -Freshness stats for a query result. +Freshness stats for a query result. 
Captures whether the query was served out of a cache, what the staleness was, etc. """ type FreshnessStats { @@ -1154,7 +1232,7 @@ type FreshnessStats { In case an index was consulted, this reflects the freshness of the index """ systemFreshness: [SystemFreshness] - + } type SystemFreshness { @@ -1176,9 +1254,14 @@ Input required for browse queries """ input BrowseV2Input { """ - The browse entity type + The browse entity type - deprecated use types instead """ - type: EntityType! + type: EntityType + + """ + The browse entity type - deprecated use types instead. If not provided, all types will be used. + """ + types: [EntityType!] """ The browse path V2 - a list with each entry being part of the browse path V2 @@ -1209,6 +1292,11 @@ input BrowseV2Input { The search query string """ query: String + + """ + Flags controlling search options + """ + searchFlags: SearchFlags } """ @@ -1299,3 +1387,36 @@ input SortCriterion { """ sortOrder: SortOrder! } + +""" +A grouping specification for search results. +""" +input GroupingSpec { + + """ + A list of grouping criteria for grouping search results. + There is no implied order in the grouping criteria. + """ + groupingCriteria: [GroupingCriterion!] + +} + +""" +A single grouping criterion for grouping search results +""" +input GroupingCriterion { + + """ + The base entity type that needs to be grouped + e.g. schemaField + Omitting this field will result in all base entities being grouped into the groupingEntityType. + """ + baseEntityType: EntityType + + """ + The type of entity being grouped into + e.g. dataset, domain, etc. + """ + groupingEntityType: EntityType! 
+ +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/tests.graphql b/datahub-graphql-core/src/main/resources/tests.graphql index 9dce48ac60d834..579f4919bdc783 100644 --- a/datahub-graphql-core/src/main/resources/tests.graphql +++ b/datahub-graphql-core/src/main/resources/tests.graphql @@ -44,6 +44,7 @@ Definition of the test type TestDefinition { """ JSON-based def for the test + Deprecated! JSON representation is no longer supported. """ json: String } @@ -209,6 +210,7 @@ input UpdateTestInput { input TestDefinitionInput { """ The string representation of the Test + Deprecated! JSON representation is no longer supported. """ json: String } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 69cd73ecd7d68d..6608f454ae8c61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static org.mockito.Mockito.mock; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -10,22 +12,26 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; +import 
io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; import org.mockito.Mockito; public class TestUtils { - public static EntityService getMockEntityService() { + public static EntityService getMockEntityService() { PathSpecBasedSchemaAnnotationVisitor.class .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); - EntityService mockEntityService = Mockito.mock(EntityService.class); + EntityService mockEntityService = + (EntityService) Mockito.mock(EntityService.class); Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry); return mockEntityService; } @@ -35,28 +41,31 @@ public static QueryContext getMockAllowContext() { } public static QueryContext getMockAllowContext(String actorUrn) { - QueryContext mockContext = Mockito.mock(QueryContext.class); + QueryContext mockContext = mock(QueryContext.class); Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()) - .thenReturn( - new Authentication( - new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + Mockito.when(mockContext.getAuthentication()).thenReturn(authentication); + OperationContext operationContext = + 
TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); + Mockito.when(mockContext.getOperationContext()).thenReturn(operationContext); + return mockContext; } public static QueryContext getMockAllowContext(String actorUrn, AuthorizationRequest request) { - QueryContext mockContext = Mockito.mock(QueryContext.class); + QueryContext mockContext = mock(QueryContext.class); Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); @@ -65,6 +74,8 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } @@ -73,11 +84,11 @@ public static QueryContext getMockDenyContext() { } public static QueryContext getMockDenyContext(String actorUrn) { - QueryContext mockContext = Mockito.mock(QueryContext.class); + QueryContext mockContext = mock(QueryContext.class); Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); @@ -86,15 +97,17 @@ public static QueryContext getMockDenyContext(String 
actorUrn) { .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequest request) { - QueryContext mockContext = Mockito.mock(QueryContext.class); + QueryContext mockContext = mock(QueryContext.class); Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); @@ -103,36 +116,44 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ .thenReturn( new Authentication( new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); return mockContext; } public static void verifyIngestProposal( - EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + EntityService mockService, + int numberOfInvocations, + MetadataChangeProposal proposal) { verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal)); } public static void verifyIngestProposal( - EntityService mockService, int numberOfInvocations, List proposals) { + EntityService mockService, + int numberOfInvocations, + List proposals) { AspectsBatchImpl batch = - AspectsBatchImpl.builder().mcps(proposals, mockService.getEntityRegistry()).build(); + AspectsBatchImpl.builder().mcps(proposals, mock(AuditStamp.class), mockService).build(); Mockito.verify(mockService, 
Mockito.times(numberOfInvocations)) - .ingestProposal(Mockito.eq(batch), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.eq(batch), Mockito.eq(false)); } public static void verifySingleIngestProposal( - EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + EntityService mockService, + int numberOfInvocations, + MetadataChangeProposal proposal) { Mockito.verify(mockService, Mockito.times(numberOfInvocations)) .ingestProposal(Mockito.eq(proposal), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations) { Mockito.verify(mockService, Mockito.times(numberOfInvocations)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); } public static void verifySingleIngestProposal( - EntityService mockService, int numberOfInvocations) { + EntityService mockService, int numberOfInvocations) { Mockito.verify(mockService, Mockito.times(numberOfInvocations)) .ingestProposal( Mockito.any(MetadataChangeProposal.class), @@ -140,12 +161,9 @@ public static void verifySingleIngestProposal( Mockito.eq(false)); } - public static void verifyNoIngestProposal(EntityService mockService) { + public static void verifyNoIngestProposal(EntityService mockService) { Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } private TestUtils() {} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java index 0d87ce4b2e2adf..3f228efafac429 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java @@ -2,10 +2,11 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.LineageEdge; @@ -16,8 +17,10 @@ import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletionException; import org.joda.time.DateTimeUtils; import org.mockito.Mockito; @@ -64,10 +67,8 @@ public void testUpdateDatasetLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); 
assertTrue(resolver.get(_mockEnv).get()); } @@ -79,8 +80,7 @@ public void testFailUpdateWithMissingDownstream() throws Exception { mockInputAndContext(edgesToAdd, new ArrayList<>()); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(false); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(false); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))).thenAnswer(args -> Set.of()); assertThrows(CompletionException.class, () -> resolver.get(_mockEnv).join()); } @@ -93,9 +93,8 @@ public void testUpdateChartLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -112,10 +111,8 @@ public void testUpdateDashboardLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DASHBOARD_URN))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); 
assertTrue(resolver.get(_mockEnv).get()); } @@ -133,11 +130,8 @@ public void testUpdateDataJobLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertTrue(resolver.get(_mockEnv).get()); } @@ -153,15 +147,13 @@ public void testFailUpdateLineageNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(0)); assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join()); } @@ -169,7 +161,7 @@ public void 
testFailUpdateLineageNoPermissions() throws Exception { private void mockInputAndContext(List edgesToAdd, List edgesToRemove) { QueryContext mockContext = getMockAllowContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java index 019d254ffdaac7..f09ead41e5c467 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -31,7 +32,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -49,24 +51,23 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(1)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) .getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -74,7 +75,8 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -85,24 +87,23 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(1)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) .getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -111,32 +112,32 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(false); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); Mockito.verify(mockClient, Mockito.times(0)) .deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class)); + eq(Urn.createFromString(TEST_ASSERTION_URN)), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .batchGetV2( - 
Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), - Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), + eq(Constants.ASSERTION_ENTITY_NAME), + eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), + eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), Mockito.any(Authentication.class)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); + .exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -144,7 +145,8 @@ public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.getAspect( Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) @@ -161,7 +163,7 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -178,14 +180,15 @@ public void testGetEntityClientException() throws Exception { .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); DeleteAssertionResolver resolver = 
new DeleteAssertionResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java index 19152a7a11877a..1a2d53b09f6b10 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java @@ -119,6 +119,11 @@ public void testGetSuccess() throws Exception { .setUrn(assertionUrn) .setAspects(new EnvelopedAspectMap(assertionAspects)))); + Mockito.when( + mockClient.exists( + Mockito.any(Urn.class), Mockito.eq(false), Mockito.any(Authentication.class))) + .thenReturn(true); + EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient); // Execute resolver @@ -128,6 +133,8 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("count"), Mockito.eq(200))).thenReturn(10); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("includeSoftDeleted"), Mockito.eq(false))) + .thenReturn(false); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Dataset parentEntity = new Dataset(); @@ -148,6 +155,9 @@ public void testGetSuccess() throws Exception { 
Mockito.verify(mockClient, Mockito.times(1)) .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + Mockito.verify(mockClient, Mockito.times(1)) + .exists(Mockito.any(), Mockito.any(), Mockito.any()); + // Assert that GraphQL assertion run event matches expectations assertEquals(result.getStart(), 0); assertEquals(result.getCount(), 1); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 419eb71d5e143d..ad30e48d8361b8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -11,7 +12,6 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -43,14 +43,13 @@ public void testGetSuccess() throws Exception { final Authentication testAuth = getAuthentication(mockEnv); Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(buildFilter(filters, Collections.emptyList())), Mockito.any(SortCriterion.class), Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - 
Mockito.any(SearchFlags.class))) + Mockito.eq(input.getCount()))) .thenReturn( new SearchResult() .setFrom(0) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index bffc2b31af2b9a..892ba4e1ebb3eb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -26,6 +26,7 @@ import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; @@ -44,6 +45,7 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = initMockEntityClient( @@ -70,7 +72,8 @@ public static void testBrowseV2Success() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -87,6 +90,7 @@ public static void testBrowseV2Success() throws Exception { @Test public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); List orFilters = 
new ArrayList<>(); @@ -123,7 +127,8 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -143,6 +148,7 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { @Test public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); + FormService mockFormService = Mockito.mock(FormService.class); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); EntityClient mockClient = @@ -170,7 +176,8 @@ public static void testBrowseV2SuccessWithView() throws Exception { .setFrom(0) .setPageSize(10)); - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, viewService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -249,13 +256,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.browseV2( - Mockito.eq(entityName), + Mockito.any(), + Mockito.eq(ImmutableList.of(entityName)), Mockito.eq(path), Mockito.eq(filter), Mockito.eq(query), Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class))) + Mockito.eq(limit))) .thenReturn(result); return client; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 
1203f4e22bdc23..c63c9bccab68b5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -22,6 +24,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -34,7 +37,7 @@ public class ContainerEntitiesResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); final String childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; @@ -47,6 +50,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), Mockito.eq("*"), Mockito.eq( @@ -59,8 +63,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(0), Mockito.eq(20), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn( new SearchResult() .setFrom(0) @@ -76,9 +79,10 @@ public void testGetSuccess() throws Exception { ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient); // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - 
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index b4c58ca182b2f3..6b0cda2957be62 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -18,7 +19,9 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,6 +34,9 @@ public void testGetSuccess() throws 
Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java index 2abfa39b35149a..be02781685d4b6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java @@ -1,8 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import com.datahub.authentication.Authentication; +import com.datahub.authorization.AuthorizationResult; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardUsageStatistics; @@ -49,6 +54,12 @@ public void testGetSuccess() throws Exception { // Execute resolver DashboardStatsSummaryResolver resolver = new DashboardStatsSummaryResolver(mockClient); QueryContext mockContext = Mockito.mock(QueryContext.class); + Authorizer mockAuthorizor = mock(Authorizer.class); + when(mockAuthorizor.authorize(any())) + .thenAnswer( 
+ args -> + new AuthorizationResult(args.getArgument(0), AuthorizationResult.Type.ALLOW, "")); + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizor); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getSource()).thenReturn(TEST_SOURCE); @@ -84,16 +95,6 @@ public void testGetSuccess() throws Exception { Mockito.eq(1), Mockito.eq(filterForLatestStats))) .thenReturn(ImmutableList.of(newResult)); - - // Then verify that the new result is _not_ returned (cache hit) - DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); - Assert.assertEquals((int) cachedResult.getViewCount(), 20); - Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals( - (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals( - (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); - Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 2); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 52516295f97ade..92ddfcaa3db766 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -87,16 +87,6 @@ public void testGetSuccess() throws Exception { mockClient.getUsageStats( Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) .thenReturn(newResult); - - // Then verify that the new result is _not_ returned (cache hit) - DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); - 
Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); - Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals( - (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals( - (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); - Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index 49ccc751d35f63..f83adf33d521ae 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,7 +15,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.List; @@ -48,8 +48,10 @@ public void testGetSuccessNoExistingStatus() throws Exception { Mockito.eq(0L))) .thenReturn(null); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -95,8 +97,10 @@ public void testGetSuccessExistingStatus() throws Exception { Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -139,8 +143,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); @@ -184,10 +190,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - 
Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index 8c3620fa978a98..f25d5a4cbbf045 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -16,7 +16,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.List; @@ -49,8 +49,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -110,8 +112,10 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -164,8 +168,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); @@ -217,10 +223,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new 
BatchUpdateDeprecationResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index e4be330f5ba2ac..f4b45b3dc8f290 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -45,9 +46,9 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.when( mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), Mockito.any(Authentication.class))) .thenReturn( ImmutableMap.of( @@ -58,7 +59,8 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -66,7 +68,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { 
QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); @@ -81,10 +83,10 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -101,9 +103,9 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.when( mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), Mockito.any(Authentication.class))) .thenReturn( ImmutableMap.of( @@ -119,7 +121,8 @@ public void testGetSuccessExistingDeprecation() throws Exception { .setValue(new Aspect(originalDeprecation.data()))))))); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + 
.thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -127,7 +130,7 @@ public void testGetSuccessExistingDeprecation() throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); @@ -142,10 +145,10 @@ public void testGetSuccessExistingDeprecation() throws Exception { UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal(eq(proposal), Mockito.any(Authentication.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -155,9 +158,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when( mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), Mockito.any(Authentication.class))) .thenReturn( ImmutableMap.of( @@ -168,7 +171,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = 
Mockito.mock(EntityService.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -176,7 +180,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -193,7 +197,7 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -214,7 +218,7 @@ public void testGetEntityClientException() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index d5ba88066e8461..81343b75f7d7e6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,7 +18,7 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -54,11 +54,15 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -89,7 +93,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)); } @Test @@ -114,11 +118,15 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -154,7 +162,7 @@ public void testGetSuccessExistingDomains() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - 
.exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)); } @Test @@ -179,11 +187,15 @@ public void testGetSuccessUnsetDomains() throws Exception { Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -223,8 +235,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(false); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -261,9 +275,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); @@ -311,10 +328,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 6184760abfabda..33cd1cb63d621e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -71,14 +72,14 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( 
DomainUtils.buildNameAndParentDomainFilter( TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -121,12 +122,12 @@ public void testGetSuccessNoParentDomain() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -194,14 +195,14 @@ public void testGetNameConflict() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( DomainUtils.buildNameAndParentDomainFilter( TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn( new SearchResult() .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)))); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java index 5632654a26ad92..69fb98fbf9e310 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.datahub.graphql.TestUtils.*; 
+import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -31,12 +32,12 @@ public void testGetSuccess() throws Exception { // Domain has 0 child domains Mockito.when( mockClient.filter( + any(), Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(1), - Mockito.any())) + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); @@ -60,12 +61,12 @@ public void testDeleteWithChildDomains() throws Exception { // Domain has child domains Mockito.when( mockClient.filter( + any(), Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(1), - Mockito.any())) + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 9596abf55d04fc..f970f9e2ea431d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -10,7 +12,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import 
com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; @@ -24,6 +26,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.stream.Collectors; import org.mockito.Mockito; @@ -50,6 +53,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.eq( SEARCHABLE_ENTITY_TYPES.stream() .map(EntityTypeMapper::getName) @@ -65,8 +69,7 @@ public void testGetSuccess() throws Exception { Mockito.eq(0), Mockito.eq(20), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn( new SearchResult() .setFrom(0) @@ -84,6 +87,7 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index ffc3e823d83510..53a16ed5f6cc8a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertThrows; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,7 +13,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -43,6 +42,7 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), @@ -51,9 +51,7 @@ public void testGetSuccess() throws Exception { .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING)), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -87,6 +85,7 @@ public void testGetSuccessNoParentDomain() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq(""), Mockito.eq(DomainUtils.buildParentDomainFilter(null)), @@ -95,9 +94,7 @@ public void testGetSuccessNoParentDomain() throws Exception { .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) .setOrder(SortOrder.DESCENDING)), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - 
Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -139,13 +136,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq("*"), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -155,13 +151,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index a0eff5d0574dbc..83ebe481708b52 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -2,6 +2,8 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -52,14 +54,14 @@ private MetadataChangeProposal setupTests( Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( 
DomainUtils.buildNameAndParentDomainFilter( name, Urn.createFromString(PARENT_DOMAIN_URN))), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); @@ -73,7 +75,8 @@ private MetadataChangeProposal setupTests( public void testGetSuccess() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -92,7 +95,8 @@ public void testGetSuccess() throws Exception { public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -115,7 +119,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { public void testGetFailureParentDoesNotExist() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), 
eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -130,7 +135,8 @@ public void testGetFailureParentDoesNotExist() throws Exception { public void testGetFailureParentIsNotDomain() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 4c8ceff9c4f80c..a0a9c984897e2b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import com.datahub.authentication.Authentication; @@ -15,7 +16,9 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ 
-28,6 +31,9 @@ public void testGetSuccessForDomain() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index ad5ad2315ce43c..7b8d11802792b6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -58,8 +59,10 @@ public void testGetSuccessNoExistingDomains() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new 
SetDomainResolver(mockClient, mockService); @@ -82,10 +85,10 @@ public void testGetSuccessNoExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } @Test @@ -119,8 +122,10 @@ public void testGetSuccessExistingDomains() throws Exception { .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -143,10 +148,10 @@ public void testGetSuccessExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } @Test @@ -170,8 +175,10 @@ public void testGetFailureDomainDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService 
mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(false); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -208,8 +215,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index 7e6e2581688985..7ac45fe98b131f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -57,7 +58,8 
@@ public void testGetSuccessNoExistingDomains() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -77,7 +79,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -111,7 +113,8 @@ public void testGetSuccessExistingDomains() throws Exception { .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -131,7 +134,7 @@ public void testGetSuccessExistingDomains() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -155,7 +158,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index 45a17744a26971..ed04a14ed7c3a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -2,12 +2,12 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -19,7 +19,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -48,7 +48,8 @@ public void testGetSuccessNoExistingEmbed() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateEmbedResolver resolver = new 
UpdateEmbedResolver(mockService); @@ -69,7 +70,7 @@ public void testGetSuccessNoExistingEmbed() throws Exception { ; Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -86,7 +87,8 @@ public void testGetSuccessExistingEmbed() throws Exception { Mockito.eq(0L))) .thenReturn(originalEmbed); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -106,7 +108,7 @@ public void testGetSuccessExistingEmbed() throws Exception { verifySingleIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -129,7 +131,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -142,8 +145,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); ; } @@ -161,8 +163,7 @@ public void testGetUnauthorized() throws Exception { 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index fa8b1d6a747ca2..c3c9ccea6d2703 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -3,6 +3,7 @@ import static org.mockito.Mockito.*; import static org.testng.Assert.*; +import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; @@ -33,7 +34,7 @@ public void testFailsNullEntity() { @Test public void testPasses() throws Exception { when(_dataFetchingEnvironment.getArgument("urn")).thenReturn(ENTITY_URN_STRING); - when(_entityService.exists(any())).thenReturn(true); + when(_entityService.exists(any(Urn.class), eq(true))).thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java new file mode 100644 index 00000000000000..79af61b1bc79da --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java @@ -0,0 +1,83 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static 
com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.com.google.common.collect.ImmutableList; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class BatchRemoveFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final BatchAssignFormInput TEST_INPUT = + new BatchAssignFormInput(TEST_FORM_URN, ImmutableList.of(TEST_DATASET_URN)); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true); + BatchRemoveFormResolver resolver = new BatchRemoveFormResolver(mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called unassign on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .batchUnassignFormForEntities( + Mockito.eq(ImmutableList.of(UrnUtils.getUrn(TEST_DATASET_URN))), + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.any(Authentication.class)); + } + + @Test + public void testThrowsError() throws Exception { + FormService mockFormService = initMockFormService(false); + 
BatchRemoveFormResolver resolver = new BatchRemoveFormResolver(mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we called unassign on the service - but it throws an error + Mockito.verify(mockFormService, Mockito.times(1)) + .batchUnassignFormForEntities( + Mockito.eq(ImmutableList.of(UrnUtils.getUrn(TEST_DATASET_URN))), + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.any(Authentication.class)); + } + + private FormService initMockFormService(final boolean shouldSucceed) throws Exception { + FormService service = Mockito.mock(FormService.class); + + if (!shouldSucceed) { + Mockito.doThrow(new RuntimeException()) + .when(service) + .batchUnassignFormForEntities( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java new file mode 100644 index 00000000000000..0fe57d0a28fffd --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java @@ -0,0 +1,167 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IsFormAssignedToMeResolverTest { + + private static final Urn TEST_USER_1 = UrnUtils.getUrn("urn:li:corpuser:test-1"); + private static final Urn TEST_USER_2 = UrnUtils.getUrn("urn:li:corpuser:test-2"); + private static final Urn TEST_GROUP_1 = UrnUtils.getUrn("urn:li:corpGroup:test-1"); + private static final Urn TEST_GROUP_2 = UrnUtils.getUrn("urn:li:corpGroup:test-2"); + + @Test + public void testGetSuccessUserMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, Collections.emptyList()); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. 
+ } + + @Test + public void testGetSuccessGroupMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessBothMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); // is matching user + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + 
Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. + } + + @Test + public void testGetSuccessNoMatchNullAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchEmptyAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setUsers(Collections.emptyList()); + actors.setGroups(Collections.emptyList()); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchNoAssignmentMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_2.toString()); // Does not match. 
+ + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + private GroupService mockGroupService(final Urn userUrn, final List groupUrns) + throws Exception { + GroupService mockService = Mockito.mock(GroupService.class); + Mockito.when( + mockService.getGroupsForUser(Mockito.eq(userUrn), Mockito.any(Authentication.class))) + .thenReturn(groupUrns); + return mockService; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java new file mode 100644 index 00000000000000..192f4ff9aa7c7b --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java @@ -0,0 +1,122 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import 
com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class VerifyFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final VerifyFormInput TEST_INPUT = + new VerifyFormInput(TEST_FORM_URN, TEST_DATASET_URN); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called verify on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity( + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)), + Mockito.any(Authentication.class)); + } + + @Test + public void testGetUnauthorized() throws Exception { + FormService mockFormService = initMockFormService(false, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do not call verify on the service + Mockito.verify(mockFormService, Mockito.times(0)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + @Test + public void testThrowErrorOnVerification() throws Exception { + FormService mockFormService = initMockFormService(true, false); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do call verifyFormForEntity but an error is thrown + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity(Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } + + private FormService initMockFormService( + final boolean isFormAssignedToUser, final boolean shouldVerify) throws Exception { + FormService service = Mockito.mock(FormService.class); + Mockito.when( + service.isFormAssignedToUser( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class))) + .thenReturn(isFormAssignedToUser); + + if (shouldVerify) { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(true); + } else { + Mockito.when( + service.verifyFormForEntity( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class))) + .thenThrow(new RuntimeException()); + } + + return service; + } + + private GroupService 
initMockGroupService() throws Exception { + GroupService service = Mockito.mock(GroupService.class); + Mockito.when(service.getGroupsForUser(Mockito.any(), Mockito.any(Authentication.class))) + .thenReturn(new ArrayList<>()); + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 287d270ab569ca..509f776a01300b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; @@ -10,6 +11,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; @@ -28,9 +30,9 @@ private EntityService setUpService() { EntityService mockService = getMockEntityService(); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + eq(0L))) .thenReturn(null); return mockService; } @@ -38,12 +40,16 @@ private EntityService setUpService() { @Test public void testGetSuccessIsRelatedNonExistent() throws Exception { 
EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -52,28 +58,32 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } 
@Test public void testGetSuccessHasRelatedNonExistent() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -82,33 +92,35 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - 
.exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetFailAddSelfAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -118,17 +130,19 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); RelatedTermsInput 
input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -138,18 +152,21 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -159,18 +176,21 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddToNonExistentUrn() throws Exception { EntityService mockService = setUpService(); + 
EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); RelatedTermsInput input = new RelatedTermsInput( TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -180,18 +200,21 @@ public void testGetFailAddToNonExistentUrn() throws Exception { @Test public void testGetFailAddToNonTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(DATASET_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(DATASET_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); 
QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); RelatedTermsInput input = new RelatedTermsInput( DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -201,12 +224,16 @@ public void testGetFailAddToNonTerm() throws Exception { @Test public void testFailNoPermissions() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -215,7 +242,7 @@ public void testFailNoPermissions() throws Exception { TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 6653b19d6ef2bd..72937cb650368f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.assertThrows; import com.datahub.authentication.Authentication; @@ -127,12 +128,12 @@ public void testGetFailureExistingTermSameName() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), Mockito.any(), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(1000), - Mockito.any())) + Mockito.eq(1000))) .thenReturn( new SearchResult() .setEntities( @@ -177,12 +178,12 @@ private EntityClient initMockClient() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), Mockito.any(), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(1000), - Mockito.any())) + Mockito.eq(1000))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( mockClient.batchGetV2( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java 
index 7229d2acf763d5..f4d4c528dc0c69 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -26,7 +27,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -50,7 +52,8 @@ public void testGetEntityClientException() throws Exception { .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index b879baf1e65dcd..60787fc47c88a5 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +23,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -33,6 +36,7 @@ public class GetRootGlossaryNodesResolverTest { public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -41,12 +45,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), Mockito.eq(buildGlossaryEntitiesFilter()), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class))) + Mockito.eq(100))) .thenReturn( new SearchResult() .setEntities( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 201bea752d53f0..51760ff9d37f25 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +23,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -33,6 +36,7 @@ public class GetRootGlossaryTermsResolverTest { public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -41,12 +45,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), Mockito.eq(buildGlossaryEntitiesFilter()), Mockito.eq(null), Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class))) + Mockito.eq(100))) .thenReturn( new SearchResult() .setEntities( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 446f58bec73aa1..a0430a9d75827f 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -20,7 +21,9 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,6 +36,9 @@ public void testGetSuccessForTerm() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -140,6 +146,9 @@ public void testGetSuccessForNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + 
TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 47de668b2c9dc3..f9a718dab0a2c1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -12,6 +13,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; @@ -41,9 +43,11 @@ public void testGetSuccessIsA() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); 
QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -56,7 +60,7 @@ public void testGetSuccessIsA() throws Exception { assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -73,9 +77,11 @@ public void testGetSuccessHasA() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -88,7 +94,7 @@ public void testGetSuccessHasA() throws Exception { assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -101,9 +107,11 @@ public void testFailAspectDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver 
resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -131,9 +139,11 @@ public void testFailNoPermissions() throws Exception { Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -146,6 +156,6 @@ public void testFailNoPermissions() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); Mockito.verify(mockService, Mockito.times(0)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index 3972715fcefb17..5b858b810657ab 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -2,6 +2,8 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; 
import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -61,7 +63,7 @@ private MetadataChangeProposal setupTests( public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -76,7 +78,7 @@ public void testGetSuccess() throws Exception { public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_NODE); @@ -106,7 +108,8 @@ public void testGetSuccessForNode() throws Exception { public void testGetSuccessForDomain() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_DOMAIN); @@ -123,13 +126,13 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.eq( 
DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), Mockito.eq(null), Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class))) + Mockito.any(Integer.class))) .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); @@ -148,7 +151,8 @@ public void testGetSuccessForDomain() throws Exception { public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index 74a59b10a40b01..cdab78023b846d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -63,8 +64,9 @@ private MetadataChangeProposal setupTests( public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -80,8 +82,9 @@ public void testGetSuccess() throws Exception { public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(NODE_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -114,8 +117,10 @@ public void testGetSuccessForNode() throws Exception { public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -131,8 +136,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -148,8 +154,9 @@ public void testGetFailureNodeDoesNotExist() throws Exception { public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TERM_URN)), eq(true))).thenReturn(true); + Mockito.when( + mockService.exists(eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java similarity index 97% rename from 
datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java rename to datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java index 3ff0120448e545..2129821e0d95fa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.graphql.resolvers.dataset; +package com.linkedin.datahub.graphql.resolvers.health; import static org.testng.Assert.*; @@ -14,6 +14,7 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Health; import com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; @@ -25,7 +26,8 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -public class DatasetHealthResolverTest { +// TODO: Update this test once assertions summary has been added. 
+public class EntityHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java new file mode 100644 index 00000000000000..86c7b86978127d --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -0,0 +1,168 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import 
com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.HashMap; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class EntityIncidentsResolverTest { + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + Urn incidentUrn = Urn.createFromString("urn:li:incident:test-guid"); + + Map incidentAspects = new HashMap<>(); + incidentAspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new IncidentKey().setId("test-guid").data()))); + + IncidentInfo expectedInfo = + new IncidentInfo() + .setType(IncidentType.OPERATIONAL) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(datasetUrn))) + .setSource( + new IncidentSource().setType(IncidentSourceType.MANUAL).setSourceUrn(assertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(userUrn))) + .setCreated(new AuditStamp().setTime(0L).setActor(userUrn)); + + incidentAspects.put( + Constants.INCIDENT_INFO_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(expectedInfo.data()))); + + final 
Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, datasetUrn.toString()); + Filter expectedFilter = QueryUtils.newFilter(criterionMap); + + SortCriterion expectedSort = new SortCriterion(); + expectedSort.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + expectedSort.setOrder(SortOrder.DESCENDING); + + Mockito.when( + mockClient.filter( + Mockito.any(), + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(expectedFilter), + Mockito.eq(expectedSort), + Mockito.eq(0), + Mockito.eq(10))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(incidentUrn))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn)), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + incidentUrn, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn) + .setAspects(new EnvelopedAspectMap(incidentAspects)))); + + EntityIncidentsResolver resolver = new EntityIncidentsResolver(mockClient); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("count"), Mockito.eq(20))).thenReturn(10); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentEntity = new Dataset(); + parentEntity.setUrn(datasetUrn.toString()); + Mockito.when(mockEnv.getSource()).thenReturn(parentEntity); + + 
EntityIncidentsResult result = resolver.get(mockEnv).get(); + + // Assert that GraphQL Incident run event matches expectations + assertEquals(result.getStart(), 0); + assertEquals(result.getCount(), 1); + assertEquals(result.getTotal(), 1); + + com.linkedin.datahub.graphql.generated.Incident incident = + resolver.get(mockEnv).get().getIncidents().get(0); + assertEquals(incident.getUrn(), incidentUrn.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), expectedInfo.getType().toString()); + assertEquals(incident.getTitle(), expectedInfo.getTitle()); + assertEquals(incident.getDescription(), expectedInfo.getDescription()); + assertEquals(incident.getCustomType(), expectedInfo.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), expectedInfo.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), expectedInfo.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + expectedInfo.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + expectedInfo.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), expectedInfo.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + expectedInfo.getSource().getSourceUrn().toString()); + assertEquals(incident.getCreated().getActor(), expectedInfo.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), expectedInfo.getCreated().getTime()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index e5cb43c4dab617..e0555f5886b8bb 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +22,7 @@ import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; import com.linkedin.secret.DataHubSecretValue; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; public class IngestTestUtils { @@ -43,6 +45,7 @@ public static QueryContext getMockAllowContext() { Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); return mockContext; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 3de88333b959d1..f3e27d91f39df0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -4,8 +4,10 @@ import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; +import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.metadata.Constants; import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -19,7 +21,9 @@ public void 
testCanManageIngestionAuthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", "MANAGE_INGESTION", Optional.empty()); + "urn:li:corpuser:authorized", + "MANAGE_INGESTION", + Optional.of(new EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -38,7 +42,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); + "urn:li:corpuser:unauthorized", + "MANAGE_INGESTION", + Optional.of(new EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -56,7 +62,10 @@ public void testCanManageSecretsAuthorized() throws Exception { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); AuthorizationRequest request = - new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); + new AuthorizationRequest( + "urn:li:corpuser:authorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -75,7 +84,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); + "urn:li:corpuser:unauthorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index fdb150e6924417..d64a41d59b30ec 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -27,6 +29,7 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -36,17 +39,17 @@ public class IngestionSourceExecutionRequestsResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); // Mock filter response Mockito.when( mockClient.filter( + any(), Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), Mockito.any(Filter.class), Mockito.any(SortCriterion.class), Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) + Mockito.eq(10))) .thenReturn( new SearchResult() .setFrom(0) @@ -101,7 +104,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); - 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -121,12 +125,12 @@ public void testGetSuccess() throws Exception { @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); @@ -140,18 +144,13 @@ public void testGetUnauthorized() throws Exception { .batchGetV2( Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) - .list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + .list(Mockito.any(), Mockito.any(), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .batchGetV2( @@ -160,7 +159,7 @@ public void testGetEntityClientException() throws Exception { new 
IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index bec141bddf2600..6ae2fa7dcbf644 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -46,7 +47,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -58,7 +59,7 @@ public void testRollbackIngestionMethod() throws Exception { resolver.rollbackIngestion(RUN_ID, mockContext).get(); Mockito.verify(mockClient, Mockito.times(1)) - .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); + .rollbackIngestion(Mockito.eq(RUN_ID), any(), any(Authentication.class)); } @Test @@ -66,7 +67,7 @@ 
public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RuntimeException.class) .when(mockClient) - .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); + .rollbackIngestion(any(), any(), any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index eafdfde364947c..ae14b75cc374cd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -13,12 +13,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import org.mockito.Mockito; import org.testng.annotations.Test; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 495adb27dbd5d3..effec8662a9aa5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -16,10 +16,10 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import org.mockito.Mockito; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index 7d89f4aafa01a5..7a1876466573d4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -14,7 +16,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; @@ -22,6 +23,7 @@ import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import 
graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -39,14 +41,13 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.SECRETS_ENTITY_NAME), Mockito.eq(""), Mockito.eq(null), Mockito.any(SortCriterion.class), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -101,6 +102,7 @@ public void testGetUnauthorized() throws Exception { QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) @@ -108,14 +110,13 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.eq(null), Mockito.any(SortCriterion.class), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java new file mode 100644 index 00000000000000..0154a94c56a512 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java @@ -0,0 +1,98 @@ 
+package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.when; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdateSecretResolverTest { + + private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:secret:secret-id"); + + private static final UpdateSecretInput TEST_INPUT = + new UpdateSecretInput(TEST_URN.toString(), "MY_SECRET", "mysecretvalue", "dummy"); + + private DataFetchingEnvironment mockEnv; + private EntityClient mockClient; + private SecretService mockSecretService; + private UpdateSecretResolver resolver; + + @BeforeMethod + public void before() { + mockClient = Mockito.mock(EntityClient.class); + mockSecretService = Mockito.mock(SecretService.class); + + resolver = new 
UpdateSecretResolver(mockClient, mockSecretService); + } + + private DataHubSecretValue createSecretAspect() { + DataHubSecretValue secretAspect = new DataHubSecretValue(); + secretAspect.setValue("encryptedvalue.updated"); + secretAspect.setName(TEST_INPUT.getName() + ".updated"); + secretAspect.setDescription(TEST_INPUT.getDescription() + ".updated"); + secretAspect.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + return secretAspect; + } + + @Test + public void testGetSuccess() throws Exception { + // with valid context + QueryContext mockContext = getMockAllowContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when(mockClient.exists(any(), any())).thenReturn(true); + Mockito.when(mockSecretService.encrypt(any())).thenReturn("encrypted_value"); + final EntityResponse entityResponse = new EntityResponse(); + final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createSecretAspect().data()))); + entityResponse.setAspects(aspectMap); + + when(mockClient.getV2(any(), any(), any(), any())).thenReturn(entityResponse); + + // Invoke the resolver + resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(any(), any(), anyBoolean()); + } + + @Test( + description = "validate if nothing provided throws Exception", + expectedExceptions = {AuthorizationException.class, CompletionException.class}) + public void testGetUnauthorized() throws Exception { + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + 
resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(), any(Authentication.class), anyBoolean()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index a86d67fcd15c18..4dfce0e0c2ee83 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -16,12 +18,12 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubIngestionSourceKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -42,14 +44,13 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), Mockito.eq(""), Mockito.any(), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new 
SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -85,6 +86,7 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -116,13 +118,12 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java new file mode 100644 index 00000000000000..21d1e0caa1bb21 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java @@ -0,0 +1,120 @@ +package com.linkedin.datahub.graphql.resolvers.load; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.graphql.generated.Dashboard; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.types.dataset.DatasetType; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.EntityService; +import 
graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class BatchGetEntitiesResolverTest { + private EntityClient _entityClient; + private EntityService _entityService; + private DataFetchingEnvironment _dataFetchingEnvironment; + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _entityClient = mock(EntityClient.class); + } + + List getRequestEntities(List urnList) { + + return urnList.stream() + .map( + urn -> { + if (urn.startsWith("urn:li:dataset")) { + Dataset entity = new Dataset(); + entity.setUrn(urn); + return entity; + } else if (urn.startsWith("urn:li:dashboard")) { + Dashboard entity = new Dashboard(); + entity.setUrn(urn); + return entity; + } else { + throw new RuntimeException("Can't handle urn " + urn); + } + }) + .collect(Collectors.toList()); + } + + @Test + /** Tests that if responses come back out of order, we stitch them back correctly */ + public void testReordering() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:1", "urn:li:dataset:2")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + 
when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity1 = new Dataset(); + mockResponseEntity1.setUrn("urn:li:dataset:1"); + + Dataset mockResponseEntity2 = new Dataset(); + mockResponseEntity2.setUrn("urn:li:dataset:2"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture( + ImmutableList.of(mockResponseEntity2, mockResponseEntity1)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any(List.class), eq(true))) + .thenAnswer(args -> Set.of(args.getArgument(0))); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity1); + assertEquals(batchGetResponse.get(1), mockResponseEntity2); + } + + @Test + /** Tests that if input list contains duplicates, we stitch them back correctly */ + public void testDuplicateUrns() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:foo", "urn:li:dataset:foo")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity = new Dataset(); + mockResponseEntity.setUrn("urn:li:dataset:foo"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any(List.class), eq(true))) + .thenAnswer(args -> 
Set.of(args.getArgument(0))); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity); + assertEquals(batchGetResponse.get(1), mockResponseEntity); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 8fc5ab6ebb8287..bed8bf35682429 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -22,7 +22,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -74,7 +73,8 @@ public class MutableTypeBatchResolverTest { @Test public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); + BatchMutableType batchMutableType = new DatasetType(mockClient); @@ -167,7 +167,8 @@ public void testGetSuccess() throws Exception { @Test public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); + BatchMutableType batchMutableType = new DatasetType(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 3fee28bc317257..f8fe38187b30dd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -22,7 +23,8 @@ public class UpdateUserSettingResolverTest { @Test public void testWriteCorpUserSettings() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_USER_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_USER_URN)), eq(true))) + .thenReturn(true); UpdateUserSettingResolver resolver = new UpdateUserSettingResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 74f88f95fc171e..4bd16d5311818c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,10 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; 
import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -21,7 +22,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -46,16 +47,21 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -85,10 +91,10 @@ public void testGetSuccessNoExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); 
Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); } @Test @@ -113,15 +119,19 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -148,7 +158,7 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test @@ -173,15 +183,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(Urn.class), eq(true))).thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - 
com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -208,7 +219,7 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test @@ -233,24 +244,32 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { Mockito.eq(0L))) .thenReturn(oldOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); Mockito.when( 
mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -289,13 +308,13 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true)); } @Test @@ -309,8 +328,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(false); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -344,8 +365,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -399,10 +422,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(any(AspectsBatchImpl.class), Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 92a789530d6e4f..cb607adf45c0a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -20,7 +20,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import 
com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -53,18 +53,24 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -100,10 +106,10 @@ public void testGetSuccessNoExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); + 
.exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test @@ -132,26 +138,34 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); Mockito.when( mockService.exists( - Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity( - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER - .name())))) + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -187,10 +201,10 @@ public void testGetSuccessExistingOwners() throws Exception { verifyIngestProposal(mockService, 1); Mockito.verify(mockService, 
Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test @@ -204,8 +218,10 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(false); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -257,9 +273,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -337,10 +356,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), 
- Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 7cef90ffee5121..84e0f6f282a7b3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.owner; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -17,7 +17,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveOwnersResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -38,22 +38,26 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -67,7 +71,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -89,9 +93,9 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(oldOwners1); final Ownership oldOwners2 = @@ -105,16 +109,20 @@ public void 
testGetSuccessExistingOwners() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(oldOwners2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -128,7 +136,7 @@ public void testGetSuccessExistingOwners() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -141,20 +149,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + 
eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -168,7 +179,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -190,7 +201,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -204,10 +215,7 @@ public void testGetEntityClientException() 
throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); @@ -221,7 +229,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index fd7baf6af74691..d18bc3aa31f898 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ownership; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -11,7 +12,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.OwnershipTypeKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import 
com.linkedin.metadata.search.SearchResult; @@ -40,14 +40,13 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), Mockito.eq(""), Mockito.eq(null), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -90,13 +89,12 @@ public void testGetUnauthorized() throws Exception { Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index 6c475cdc7f5a85..340e8cbf8514c1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -20,7 +20,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -33,7 +32,6 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Map; -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -120,14 +118,7 @@ public void 
testListPosts() throws Exception { ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); when(_entityClient.search( - eq(POST_ENTITY_NAME), - any(), - eq(null), - any(), - anyInt(), - anyInt(), - eq(_authentication), - Mockito.eq(new SearchFlags().setFulltext(true)))) + any(), eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt())) .thenReturn(roleSearchResult); when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())) .thenReturn(_entityResponseMap); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java new file mode 100644 index 00000000000000..073e925c26b2ed --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java @@ -0,0 +1,115 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.MediaType; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import 
com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import com.linkedin.datahub.graphql.generated.UpdatePostInput; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdatePostResolverTest { + + private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:post:post-id"); + private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; + private static final String POST_MEDIA_LOCATION = + "https://datahubproject.io/img/datahub-logo-color-light-horizontal.svg"; + private static final PostContentType POST_CONTENT_TYPE = PostContentType.LINK; + private static final String POST_TITLE = "title"; + private static final String POST_DESCRIPTION = "description"; + private static final String POST_LINK = "https://datahubproject.io"; + private PostService postService; + private UpdatePostResolver resolver; + private DataFetchingEnvironment dataFetchingEnvironment; + private Authentication authentication; + + @BeforeMethod + public void setupTest() throws Exception { + postService = mock(PostService.class); + dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + authentication = mock(Authentication.class); + + resolver = new UpdatePostResolver(postService); + } + + @Test + public void testNotAuthorizedFails() { + QueryContext mockContext = getMockDenyContext(); + when(dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> resolver.get(dataFetchingEnvironment).join()); + } + + @Test + public void testUpdatePost() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(authentication); + + UpdateMediaInput media = new UpdateMediaInput(); + media.setType(POST_MEDIA_TYPE); + media.setLocation(POST_MEDIA_LOCATION); + Media mediaObj = + 
new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(postService.mapMedia(POST_MEDIA_TYPE.toString(), POST_MEDIA_LOCATION)) + .thenReturn(mediaObj); + + UpdatePostContentInput content = new UpdatePostContentInput(); + content.setTitle(POST_TITLE); + content.setDescription(POST_DESCRIPTION); + content.setLink(POST_LINK); + content.setContentType(POST_CONTENT_TYPE); + content.setMedia(media); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); + + UpdatePostInput input = new UpdatePostInput(); + input.setUrn(TEST_URN.toString()); + input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); + input.setContent(content); + when(dataFetchingEnvironment.getArgument("input")).thenReturn(input); + when(postService.updatePost( + TEST_URN, PostType.HOME_PAGE_ANNOUNCEMENT.toString(), postContentObj, authentication)) + .thenReturn(true); + + assertTrue(resolver.get(dataFetchingEnvironment).join()); + verify(postService, times(1)).updatePost(any(), any(), any(), any()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 8a56b142e5b5ef..9ed1d5001b75c3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java 
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -17,7 +17,6 @@ import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -62,6 +61,7 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq( input.getQuery() == null @@ -73,9 +73,7 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { .setField(ListQueriesResolver.CREATED_AT_FIELD) .setOrder(SortOrder.DESCENDING)), Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + Mockito.eq(input.getCount()))) .thenReturn( new SearchResult() .setFrom(0) @@ -116,13 +114,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq("*"), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.anyInt()); } @Test @@ -132,13 +129,12 @@ 
public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.anyInt()); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 9197d1b18c0c9c..e9d5ef00e74dd7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -9,6 +9,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateInviteTokenInput; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -42,7 +43,7 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(true))) .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index 8e761454cb06c3..78d848e882b6bf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -9,6 +9,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetInviteTokenInput; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -19,13 +20,15 @@ public class GetInviteTokenResolverTest { private GetInviteTokenResolver _resolver; private DataFetchingEnvironment _dataFetchingEnvironment; private Authentication _authentication; + private OperationContext opContext; @BeforeMethod public void setupTest() throws Exception { _inviteTokenService = mock(InviteTokenService.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - + opContext = mock(OperationContext.class); + when(opContext.getAuthentication()).thenReturn(_authentication); _resolver = new GetInviteTokenResolver(_inviteTokenService); } @@ -42,7 +45,7 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(false))) .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index d956295faa180f..ab2f852d83040a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -19,7 +19,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -27,7 +26,6 @@ import com.linkedin.policy.DataHubRoleInfo; import graphql.schema.DataFetchingEnvironment; import java.util.Map; -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -102,13 +100,7 @@ public void testListRoles() throws Exception { new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); when(_entityClient.search( - eq(DATAHUB_ROLE_ENTITY_NAME), - any(), - any(), - anyInt(), - anyInt(), - any(), - Mockito.eq(new SearchFlags().setFulltext(true)))) + any(), eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt())) .thenReturn(roleSearchResult); when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())) .thenReturn(_entityResponseMap); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c7d397c5a4a73a..58fbadf7e0d7fd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -15,7 +16,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -27,6 +28,7 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; @@ -52,6 +54,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + FormService mockFormService = Mockito.mock(FormService.class); List facets = ImmutableList.of("platform", "domains"); @@ -71,7 +74,7 @@ public static void testApplyViewNullBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput 
testInput = new AggregateAcrossEntitiesInput( @@ -102,6 +105,7 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); @@ -122,7 +126,7 @@ public static void testApplyViewBaseFilter() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -166,6 +170,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -184,7 +189,7 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); @@ -217,6 +222,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, info); EntityClient mockClient = @@ -235,7 +241,7 @@ 
public static void testApplyViewEmptyBaseEntityTypes() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( @@ -267,6 +273,7 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); List searchEntityTypes = @@ -290,7 +297,7 @@ public static void testApplyViewViewDoesNotExist() throws Exception { .setMetadata(new SearchResultMetadata())); final AggregateAcrossEntitiesResolver resolver = - new AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); @@ -306,23 +313,24 @@ public static void testApplyViewViewDoesNotExist() throws Exception { @Test public static void testErrorFetchingResults() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final AggregateAcrossEntitiesResolver resolver = - new 
AggregateAcrossEntitiesResolver(mockClient, mockService); + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); @@ -385,14 +393,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), Mockito.eq(facets))) .thenReturn(result); return client; @@ -409,14 +416,13 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), Mockito.eq(facets)); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 3b69337acfbd0e..5acd91be1001f4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -73,6 +74,7 @@ public static void testAutoCompleteResolverSuccess( @Test 
public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); + // Datasets EntityClient mockClient = initMockEntityClient( @@ -139,6 +141,7 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); + EntityClient mockClient = initMockEntityClient( Constants.DATASET_ENTITY_NAME, @@ -192,17 +195,18 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( + any(), Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), - Mockito.eq(10), - Mockito.any(Authentication.class)); + Mockito.eq(10)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); + final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver( ImmutableList.of(new DatasetType(mockClient)), viewService); @@ -225,11 +229,11 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.autoComplete( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class))) + Mockito.eq(limit))) .thenReturn(result); return client; } @@ -246,11 +250,11 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class)); + Mockito.eq(limit)); } private static
DataHubViewInfo createViewInfo(StringArray entityNames) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index 29a2b3081aefe3..25e374c766deba 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -2,15 +2,15 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; -import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.AggregationMetadata; @@ -108,14 +108,14 @@ public static void testGetQuickFiltersFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new 
GetQuickFiltersResolver(mockClient, mockService); @@ -259,7 +259,7 @@ private static QuickFilter createQuickFilter( quickFilter.setField(field); quickFilter.setValue(value); if (entityUrn != null) { - quickFilter.setEntity(UrnToEntityMapper.map(UrnUtils.getUrn(entityUrn))); + quickFilter.setEntity(UrnToEntityMapper.map(null, UrnUtils.getUrn(entityUrn))); } return quickFilter; } @@ -294,14 +294,14 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn(result); return client; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index d0bbfd126b9b96..1ef44bbed4cbcd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; @@ -15,7 +16,7 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -431,14 +432,14 @@ public static void testApplyViewErrorFetchingView() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( mockClient.searchAcrossEntities( + any(), Mockito.anyList(), Mockito.anyString(), Mockito.any(), Mockito.anyInt(), Mockito.anyInt(), Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenThrow(new RemoteInvocationException()); final SearchAcrossEntitiesResolver resolver = @@ -480,14 +481,13 @@ private static EntityClient initMockEntityClient( EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class))) + Mockito.eq(null))) .thenReturn(result); return client; } @@ -502,14 +502,13 @@ private static void verifyMockEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class)); + Mockito.eq(null)); } private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index 273f7156c12a8b..153e98149ff1a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -14,7 +14,8 @@ import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.AggregationMetadataArray; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchEntityArray; @@ -22,8 +23,11 @@ import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.io.InputStream; import java.util.Collections; import java.util.List; +import org.mockito.ArgumentCaptor; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @@ -43,13 +47,28 @@ public class SearchAcrossLineageResolverTest { private Authentication _authentication; private SearchAcrossLineageResolver _resolver; + private EntityRegistry _entityRegistry; + @BeforeMethod public void setupTest() { _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - _resolver = new SearchAcrossLineageResolver(_entityClient); + _entityRegistry = mock(EntityRegistry.class); + _resolver = new SearchAcrossLineageResolver(_entityClient, _entityRegistry); + } + + @Test + public void testAllEntitiesInitialization() { + InputStream inputStream = ClassLoader.getSystemResourceAsStream("entity-registry.yml"); + EntityRegistry entityRegistry = new ConfigEntityRegistry(inputStream); + SearchAcrossLineageResolver resolver = + new 
SearchAcrossLineageResolver(_entityClient, entityRegistry); + assertTrue(resolver._allEntities.contains("dataset")); + assertTrue(resolver._allEntities.contains("dataFlow")); + // Test for case sensitivity + assertFalse(resolver._allEntities.contains("dataflow")); } @Test @@ -86,8 +105,10 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchEntity.setMatchedFields(new MatchedFieldArray()); lineageSearchEntity.setPaths(new UrnArrayArray()); lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); + ArgumentCaptor opContext = ArgumentCaptor.forClass(OperationContext.class); when(_entityClient.searchAcrossLineage( + opContext.capture(), eq(UrnUtils.getUrn(SOURCE_URN_STRING)), eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), anyList(), @@ -96,16 +117,18 @@ public void testSearchAcrossLineage() throws Exception { any(), eq(null), eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))) + eq(COUNT))) .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); assertEquals(results.getTotal(), 1); + assertEquals( + opContext.getValue().getSearchContext().getLineageFlags().getStartTimeMillis(), + START_TIMESTAMP_MILLIS); + assertEquals( + opContext.getValue().getSearchContext().getLineageFlags().getEndTimeMillis(), + END_TIMESTAMP_MILLIS); final List entities = results.getSearchResults(); assertEquals(entities.size(), 1); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 24724cb8e23ad3..a5310a052f613c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,14 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.search; import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; -import com.datahub.authentication.Authentication; +import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.GroupingCriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; @@ -19,6 +22,22 @@ import org.testng.annotations.Test; public class SearchResolverTest { + + private com.linkedin.metadata.query.SearchFlags setConvertSchemaFieldsToDatasets( + com.linkedin.metadata.query.SearchFlags flags, boolean value) { + if (value) { + return flags.setGroupingSpec( + new com.linkedin.metadata.query.GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); + } else { + return flags.setGroupingSpec(null, SetMode.REMOVE_IF_NULL); + } + } + @Test public void testDefaultSearchFlags() throws Exception { EntityClient mockClient = initMockSearchEntityClient(); @@ -40,12 +59,16 @@ public void testDefaultSearchFlags() throws Exception { null, 0, 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - 
.setSkipCache(false)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), + true)); } @Test @@ -77,12 +100,14 @@ public void testOverrideSearchFlags() throws Exception { null, 1, 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true), + false)); } @Test @@ -107,26 +132,29 @@ public void testNonWildCardSearchFlags() throws Exception { null, 0, 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false)); + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), + true)); } private EntityClient initMockSearchEntityClient() throws Exception { EntityClient client = Mockito.mock(EntityClient.class); Mockito.when( client.search( + any(), Mockito.anyString(), Mockito.anyString(), Mockito.any(), Mockito.any(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.any())) + Mockito.anyInt())) .thenReturn( new SearchResult() .setEntities(new SearchEntityArray()) @@ -149,14 +177,13 @@ private void verifyMockSearchEntityClient( throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .search( + any(), 
Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(sortCriterion), Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags)); + Mockito.eq(limit)); } private SearchResolverTest() {} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java new file mode 100644 index 00000000000000..3c97d64a745bc8 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java @@ -0,0 +1,242 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.PropertyValueInput; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyInputParams; +import com.linkedin.datahub.graphql.generated.UpsertStructuredPropertiesInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import 
com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import graphql.com.google.common.collect.ImmutableList; +import graphql.com.google.common.collect.ImmutableSet; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpsertStructuredPropertiesResolverTest { + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String PROPERTY_URN_1 = "urn:li:structuredProperty:test1"; + private static final String PROPERTY_URN_2 = "urn:li:structuredProperty:test2"; + + private static final StructuredPropertyInputParams PROP_INPUT_1 = + new StructuredPropertyInputParams( + PROPERTY_URN_1, ImmutableList.of(new PropertyValueInput("test1", null))); + private static final StructuredPropertyInputParams PROP_INPUT_2 = + new StructuredPropertyInputParams( + PROPERTY_URN_2, ImmutableList.of(new PropertyValueInput("test2", null))); + private static final UpsertStructuredPropertiesInput TEST_INPUT = + new UpsertStructuredPropertiesInput( + TEST_DATASET_URN, ImmutableList.of(PROP_INPUT_1, PROP_INPUT_2)); + + @Test + public void testGetSuccessUpdateExisting() throws Exception { + // mock it so that this entity already has values for the given two properties + StructuredPropertyValueAssignmentArray initialProperties = + new StructuredPropertyValueAssignmentArray(); + PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + initialProperties.add( + new StructuredPropertyValueAssignment() + .setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_1)) + .setValues(propertyValues)); + initialProperties.add( + new StructuredPropertyValueAssignment() + 
.setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_2)) + .setValues(propertyValues)); + EntityClient mockEntityClient = initMockEntityClient(true, initialProperties); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test1").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test2").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + public void testGetSuccessNoExistingProps() throws Exception { + // mock so the original entity has no structured properties + EntityClient mockEntityClient = initMockEntityClient(true, null); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test2").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test1").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + public void testGetSuccessOneExistingOneNew() throws Exception { + // mock so the original entity has one of the input props and one is new + StructuredPropertyValueAssignmentArray initialProperties = + new StructuredPropertyValueAssignmentArray(); + PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + initialProperties.add( + new StructuredPropertyValueAssignment() + .setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_1)) + .setValues(propertyValues)); + EntityClient mockEntityClient = initMockEntityClient(true, initialProperties); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test1").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test2").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + @Test + public void testThrowsError() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false, null); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal( + 
Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); + } + + private EntityClient initMockEntityClient( + final boolean shouldSucceed, @Nullable StructuredPropertyValueAssignmentArray properties) + throws Exception { + Urn assetUrn = UrnUtils.getUrn(TEST_DATASET_URN); + EntityClient client = Mockito.mock(EntityClient.class); + + Mockito.when(client.exists(Mockito.eq(assetUrn), Mockito.any())).thenReturn(true); + + if (!shouldSucceed) { + Mockito.doThrow(new RuntimeException()) + .when(client) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(Authentication.class)); + } else { + if (properties == null) { + Mockito.when( + client.getV2( + Mockito.eq(assetUrn.getEntityType()), + Mockito.eq(assetUrn), + Mockito.eq(ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(null); + } else { + StructuredProperties structuredProps = new StructuredProperties(); + structuredProps.setProperties(properties); + EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + STRUCTURED_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(structuredProps.data()))); + EntityResponse response = new EntityResponse(); + response.setAspects(aspectMap); + Mockito.when( + client.getV2( + Mockito.eq(assetUrn.getEntityType()), + Mockito.eq(assetUrn), + Mockito.eq(ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(response); + } + } + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 340802cde467b8..b8c4ce21949373 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -17,7 +17,8 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTagsResolver; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; @@ -42,9 +43,12 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -73,10 +77,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); 
Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -97,9 +101,12 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -128,10 +135,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -145,8 +152,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -173,8 +182,10 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); @@ -210,12 +221,11 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 71354627b11452..82dd13ee29e8a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -2,10 +2,10 @@ import static 
com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -19,7 +19,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.List; @@ -54,11 +54,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -93,10 +97,10 @@ public void testGetSuccessNoExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); 
Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -124,11 +128,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -163,10 +171,10 @@ public void testGetSuccessExistingTags() throws Exception { verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test @@ -180,8 +188,10 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.eq(0L))) 
.thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -197,10 +207,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -220,9 +227,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); @@ -240,10 +250,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - 
Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -266,10 +273,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -278,10 +282,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index 8cd10afee293ea..83de3acfb4c94d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -2,10 +2,10 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -20,7 +20,7 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -56,11 +56,15 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -128,11 +132,15 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.eq(0L))) .thenReturn(oldTags2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -179,9 +187,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); @@ -199,10 +210,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -225,10 +233,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); 
} @Test @@ -237,10 +242,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index 6ae72fcbb72688..f7929012ccb688 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -2,6 +2,7 @@ import static com.linkedin.datahub.graphql.TestUtils.*; import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -47,7 +48,8 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.eq(0L))) .thenReturn(oldTagProperties); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -69,7 +71,7 @@ public void testGetSuccessExistingProperties() throws Exception { .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -86,7 +88,8 @@ public void 
testGetFailureNoExistingProperties() throws Exception { Mockito.eq(0))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -131,7 +134,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index cb827a42333b23..d0697df3f2f6c0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -16,7 +16,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import 
com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -35,14 +35,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -52,20 +55,19 @@ public void testGetSuccessNoExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + 
.ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -82,14 +84,17 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -99,20 +104,19 @@ public void testGetSuccessExistingTerms() throws Exception { AddTermsInput input = new AddTermsInput( ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, 
Mockito.times(1)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), eq(false)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test @@ -121,13 +125,15 @@ public void testGetFailureTermDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -136,15 +142,12 @@ public void testGetFailureTermDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - 
Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -153,13 +156,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); @@ -168,15 +173,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -189,16 +191,13 @@ public void testGetUnauthorized() throws Exception { DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); AddTermsInput input = new 
AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test @@ -207,10 +206,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); @@ -219,7 +215,7 @@ public void testGetEntityClientException() throws Exception { QueryContext mockContext = getMockAllowContext(); AddTermsInput input = new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 7df19fad52689f..b3700632f56cdd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -17,7 +17,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchAddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -38,24 +38,26 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + 
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -69,17 +71,17 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -96,24 +98,26 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + 
eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -127,17 +131,17 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); Mockito.verify(mockService, Mockito.times(1)) - .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); + .exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test @@ -146,13 +150,14 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when( 
mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -164,7 +169,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -177,20 +182,22 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); @@ -204,7 +211,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -225,7 +232,7 @@ public void testGetUnauthorized() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -239,10 +246,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchAddTermsResolver resolver = 
new BatchAddTermsResolver(mockService); @@ -253,7 +257,7 @@ public void testGetEntityClientException() throws Exception { new BatchAddTermsInput( ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index 659ce40542a9cf..a76a813802b94a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.term; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.eq; import static org.testng.Assert.*; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -17,7 +17,7 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -38,22 +38,26 @@ public void 
testGetSuccessNoExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -66,7 +70,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -89,9 +93,9 @@ public void 
testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms1); final GlossaryTerms oldTerms2 = @@ -104,16 +108,20 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -126,7 +134,7 @@ public void testGetSuccessExistingTerms() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -139,20 +147,23 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); Mockito.when( mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -165,7 +176,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -186,7 +197,7 @@ public void testGetUnauthorized() throws Exception { 
ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -200,10 +211,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class) .when(mockService) - .ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean()); + .ingestProposal(Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); @@ -216,7 +224,7 @@ public void testGetEntityClientException() throws Exception { ImmutableList.of( new ResourceRefInput(TEST_ENTITY_URN_1, null, null), new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 6075425d09c050..5e3cd539cade76 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,16 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -34,13 +33,12 @@ public void testGetSuccess() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.TEST_ENTITY_NAME), Mockito.eq(""), Mockito.eq(Collections.emptyMap()), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -80,14 +78,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) - .search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test @@ -96,14 +87,7 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver diff 
--git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 8c30c17201bc65..a3b9e25e992259 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -15,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -44,6 +43,7 @@ public void testGetSuccessInput() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -67,9 +67,7 @@ public void testGetSuccessInput() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -112,13 +110,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) 
.search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -128,13 +125,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 85e20cd656fcd3..99b0e76976748e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListMyViewsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -46,6 +45,7 @@ 
public void testGetSuccessInput1() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -78,9 +78,7 @@ public void testGetSuccessInput1() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -113,6 +111,7 @@ public void testGetSuccessInput2() throws Exception { Mockito.when( mockClient.search( + any(), Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), Mockito.eq(""), Mockito.eq( @@ -135,9 +134,7 @@ public void testGetSuccessInput2() throws Exception { .setNegated(false)))))))), Mockito.any(), Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))) + Mockito.eq(20))) .thenReturn( new SearchResult() .setFrom(0) @@ -178,13 +175,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockClient, Mockito.times(0)) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test @@ -194,13 +190,12 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c975c7ebb0507c..82a4722985896c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -23,8 +24,10 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.AssertionKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -87,6 +90,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java index 54b341fc1865a5..ef69278df61a7a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java @@ 
-8,7 +8,8 @@ public class EmbedMapperTest { @Test public void testEmbedMapper() throws Exception { final String renderUrl = "https://www.google.com"; - final Embed result = EmbedMapper.map(new com.linkedin.common.Embed().setRenderUrl(renderUrl)); + final Embed result = + EmbedMapper.map(null, new com.linkedin.common.Embed().setRenderUrl(renderUrl)); Assert.assertEquals(result.getRenderUrl(), renderUrl); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 1e2acd0db455cd..15ebc975063da5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.container; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -40,8 +41,10 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.ContainerKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -102,7 +105,7 @@ public class ContainerTypeTest { @Test public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); + EntityClient client = mock(EntityClient.class); Urn containerUrn1 = Urn.createFromString(TEST_CONTAINER_1_URN); Urn containerUrn2 = Urn.createFromString(TEST_CONTAINER_2_URN); @@ -157,8 +160,12 @@ public void testBatchLoad() throws Exception { ContainerType type = new 
ContainerType(client); - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); @@ -200,7 +207,7 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .batchGetV2( @@ -211,8 +218,8 @@ public void testBatchLoadClientException() throws Exception { ContainerType type = new ContainerType(mockClient); // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + QueryContext context = mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(mock(Authentication.class)); assertThrows( RuntimeException.class, () -> diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 667d943b1095d0..1bd1f96a7efbd4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -35,6 +35,7 @@ import 
com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -142,6 +143,9 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.userContextNoSearchAuthorization(TEST_ACTOR_URN)); + List> result = type.batchLoad( ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index b28dd287e3fe4a..8bfbdbe282ad65 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -50,7 +50,7 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); @@ -100,7 +100,7 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new 
EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); @@ -154,7 +154,7 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 612136d1f9164a..42220091f5853b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -48,7 +48,7 @@ public void testMapperFullProfile() { .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); final EnvelopedAspect inputAspect = new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + final DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); @@ -167,7 +167,7 @@ public void testMapperPartialProfile() { .setUniqueProportion(40.5f)))); final EnvelopedAspect inputAspect = new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + final 
DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java new file mode 100644 index 00000000000000..8cce03389debb4 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java @@ -0,0 +1,96 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import java.util.Collections; +import org.testng.annotations.Test; + +public class IncidentMapperTest { + + @Test + public void testMap() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:incident:1"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedIncidentInfo = new 
EnvelopedAspect(); + IncidentInfo incidentInfo = new IncidentInfo(); + incidentInfo.setType(IncidentType.OPERATIONAL); + incidentInfo.setCustomType("Custom Type"); + incidentInfo.setTitle("Test Incident", SetMode.IGNORE_NULL); + incidentInfo.setDescription("This is a test incident", SetMode.IGNORE_NULL); + incidentInfo.setPriority(1, SetMode.IGNORE_NULL); + incidentInfo.setEntities(new UrnArray(Collections.singletonList(urn))); + + IncidentSource source = new IncidentSource(); + source.setType(IncidentSourceType.MANUAL); + source.setSourceUrn(assertionUrn); + incidentInfo.setSource(source); + + AuditStamp lastStatus = new AuditStamp(); + lastStatus.setTime(1000L); + lastStatus.setActor(userUrn); + incidentInfo.setCreated(lastStatus); + + IncidentStatus status = new IncidentStatus(); + status.setState(IncidentState.ACTIVE); + status.setLastUpdated(lastStatus); + status.setMessage("This incident is open.", SetMode.IGNORE_NULL); + incidentInfo.setStatus(status); + + AuditStamp created = new AuditStamp(); + created.setTime(1000L); + created.setActor(userUrn); + incidentInfo.setCreated(created); + + envelopedIncidentInfo.setValue(new Aspect(incidentInfo.data())); + entityResponse.setAspects( + new EnvelopedAspectMap( + Collections.singletonMap(Constants.INCIDENT_INFO_ASPECT_NAME, envelopedIncidentInfo))); + + Incident incident = IncidentMapper.map(null, entityResponse); + + assertNotNull(incident); + assertEquals(incident.getUrn(), "urn:li:incident:1"); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getCustomType(), "Custom Type"); + assertEquals( + incident.getIncidentType().toString(), + com.linkedin.datahub.graphql.generated.IncidentType.OPERATIONAL.toString()); + assertEquals(incident.getTitle(), "Test Incident"); + assertEquals(incident.getDescription(), "This is a test incident"); + assertEquals(incident.getPriority().intValue(), 1); + assertEquals( + incident.getSource().getType().toString(), + 
com.linkedin.datahub.graphql.generated.IncidentSourceType.MANUAL.toString()); + assertEquals(incident.getSource().getSource().getUrn(), assertionUrn.toString()); + assertEquals( + incident.getStatus().getState().toString(), + com.linkedin.datahub.graphql.generated.IncidentState.ACTIVE.toString()); + assertEquals(incident.getStatus().getMessage(), "This incident is open."); + assertEquals(incident.getStatus().getLastUpdated().getTime().longValue(), 1000L); + assertEquals(incident.getStatus().getLastUpdated().getActor(), userUrn.toString()); + assertEquals(incident.getCreated().getTime().longValue(), 1000L); + assertEquals(incident.getCreated().getActor(), userUrn.toString()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java new file mode 100644 index 00000000000000..6f06d20c972278 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java @@ -0,0 +1,181 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import 
com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.r2.RemoteInvocationException; +import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IncidentTypeTest { + + private static final String TEST_INCIDENT_URN = "urn:li:incident:guid-1"; + private static Urn testAssertionUrn; + private static Urn testUserUrn; + private static Urn testDatasetUrn; + + static { + try { + testAssertionUrn = Urn.createFromString("urn:li:assertion:test"); + testUserUrn = Urn.createFromString("urn:li:corpuser:test"); + testDatasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + } catch (Exception ignored) { + // ignored + } + } + + private static final IncidentKey TEST_INCIDENT_KEY = new IncidentKey().setId("guid-1"); + private static final IncidentInfo TEST_INCIDENT_INFO = + new IncidentInfo() + .setType(IncidentType.OPERATIONAL) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(testDatasetUrn))) + .setSource( + new IncidentSource() + .setType(IncidentSourceType.MANUAL) + .setSourceUrn(testAssertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(testUserUrn))) + .setCreated(new AuditStamp().setTime(0L).setActor(testUserUrn)); + private static final String TEST_INCIDENT_URN_2 = 
"urn:li:incident:guid-2"; + + @Test + public void testBatchLoad() throws Exception { + + EntityClient client = Mockito.mock(EntityClient.class); + + Urn incidentUrn1 = Urn.createFromString(TEST_INCIDENT_URN); + Urn incidentUrn2 = Urn.createFromString(TEST_INCIDENT_URN_2); + + Map incident1Aspects = new HashMap<>(); + incident1Aspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_KEY.data()))); + incident1Aspects.put( + Constants.INCIDENT_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(incidentUrn1, incidentUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + incidentUrn1, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn1) + .setAspects(new EnvelopedAspectMap(incident1Aspects)))); + + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + + List> result = + type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn1, incidentUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); + + assertEquals(result.size(), 2); + + 
Incident incident = result.get(0).getData(); + assertEquals(incident.getUrn(), TEST_INCIDENT_URN.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), TEST_INCIDENT_INFO.getType().toString()); + assertEquals(incident.getTitle(), TEST_INCIDENT_INFO.getTitle()); + assertEquals(incident.getDescription(), TEST_INCIDENT_INFO.getDescription()); + assertEquals(incident.getCustomType(), TEST_INCIDENT_INFO.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), + TEST_INCIDENT_INFO.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), TEST_INCIDENT_INFO.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + TEST_INCIDENT_INFO.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + TEST_INCIDENT_INFO.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), + TEST_INCIDENT_INFO.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + TEST_INCIDENT_INFO.getSource().getSourceUrn().toString()); + assertEquals( + incident.getCreated().getActor(), TEST_INCIDENT_INFO.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), TEST_INCIDENT_INFO.getCreated().getTime()); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), context)); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java new file mode 100644 index 00000000000000..927d5185a71c72 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java @@ -0,0 +1,73 @@ +package com.linkedin.datahub.graphql.types.mappers; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.MatchedField; +import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; +import 
io.datahubproject.test.metadata.context.TestOperationContexts; +import java.net.URISyntaxException; +import java.util.List; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class MapperUtilsTest { + private EntityRegistry entityRegistry; + + @BeforeTest + public void setup() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + } + + @Test + public void testMatchedFieldValidation() throws URISyntaxException { + final Urn urn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:s3,urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29,PROD)"); + final Urn invalidUrn = + Urn.createFromString( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + assertThrows( + IllegalArgumentException.class, + () -> ValidationUtils.validateUrn(entityRegistry, invalidUrn)); + + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization(entityRegistry)); + + List actualMatched = + MapperUtils.getMatchedFieldEntry( + mockContext, + List.of( + buildSearchMatchField(urn.toString()), + buildSearchMatchField(invalidUrn.toString()))); + + assertEquals(actualMatched.size(), 2, "Matched fields should be 2"); + assertEquals( + actualMatched.stream().filter(matchedField -> matchedField.getEntity() != null).count(), + 1, + "With urn should be 1"); + } + + private static com.linkedin.metadata.search.MatchedField buildSearchMatchField( + String highlightValue) { + com.linkedin.metadata.search.MatchedField field = + new com.linkedin.metadata.search.MatchedField(); + field.setName("testField"); + field.setValue(highlightValue); + 
return field; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index f88c8285e20df9..b5862ef2f8feac 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -45,6 +46,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.notebook.EditableNotebookProperties; import com.linkedin.notebook.NotebookCell; import com.linkedin.notebook.NotebookCellArray; @@ -54,6 +56,7 @@ import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -193,6 +196,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index c8f694320d88ae..dcf81dac3fbd9b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.query; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -18,6 +19,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.query.QueryLanguage; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; @@ -27,6 +29,7 @@ import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -124,6 +127,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad( ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); @@ -178,6 +185,10 @@ public void testBatchLoadNullEntity() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + 
TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad( ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index f02fd38e2ca7ca..557a77601b42c1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.view; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +22,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -32,6 +34,7 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -179,6 +182,10 @@ public void testBatchLoadValidView() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = 
type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); @@ -245,6 +252,10 @@ public void testBatchLoadInvalidView() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java new file mode 100644 index 00000000000000..b4097d9dd045df --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java @@ -0,0 +1,110 @@ +package com.linkedin.datahub.graphql.utils; + +import static org.testng.AssertJUnit.*; + +import com.linkedin.common.Owner; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; +import java.net.URISyntaxException; +import org.testng.annotations.Test; + +public class OwnerUtilsTest { + + public static String TECHNICAL_OWNER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__technical_owner"; + public static String BUSINESS_OWNER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__business_owner"; + + @Test + public void testMapOwnershipType() { + assertEquals( + OwnerUtils.mapOwnershipTypeToEntity("TECHNICAL_OWNER"), TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + } + + @Test + public void testIsOwnerEqualUrnOnly() throws URISyntaxException { + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + Owner owner1 = new Owner(); + owner1.setOwner(ownerUrn1); + assertTrue(OwnerUtils.isOwnerEqual(owner1, ownerUrn1, null)); + + Urn ownerUrn2 = new Urn("urn:li:corpuser:bar"); 
+ assertFalse(OwnerUtils.isOwnerEqual(owner1, ownerUrn2, null)); + } + + @Test + public void testIsOwnerEqualWithLegacyTypeOnly() throws URISyntaxException { + + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + Owner ownerWithTechnicalOwnership = new Owner(); + ownerWithTechnicalOwnership.setOwner(ownerUrn1); + ownerWithTechnicalOwnership.setType(OwnershipType.TECHNICAL_OWNER); + + assertTrue( + OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + + Owner ownerWithBusinessOwnership = new Owner(); + ownerWithBusinessOwnership.setOwner(ownerUrn1); + ownerWithBusinessOwnership.setType(OwnershipType.BUSINESS_OWNER); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithBusinessOwnership, ownerUrn1, new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN))); + } + + @Test + public void testIsOwnerEqualOnlyOwnershipTypeUrn() throws URISyntaxException { + + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn businessOwnershipTypeUrn = new Urn(BUSINESS_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + + Owner ownerWithTechnicalOwnership = new Owner(); + ownerWithTechnicalOwnership.setOwner(ownerUrn1); + ownerWithTechnicalOwnership.setTypeUrn(technicalOwnershipTypeUrn); + + Owner ownerWithBusinessOwnership = new Owner(); + ownerWithBusinessOwnership.setOwner(ownerUrn1); + ownerWithBusinessOwnership.setTypeUrn(businessOwnershipTypeUrn); + + Owner ownerWithoutOwnershipType = new Owner(); + ownerWithoutOwnershipType.setOwner(ownerUrn1); + ownerWithoutOwnershipType.setType(OwnershipType.NONE); + + assertTrue( + OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse( + OwnerUtils.isOwnerEqual(ownerWithBusinessOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse(OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, null)); + 
assertTrue(OwnerUtils.isOwnerEqual(ownerWithoutOwnershipType, ownerUrn1, null)); + } + + public void testIsOwnerEqualWithBothLegacyAndNewType() throws URISyntaxException { + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn businessOwnershipTypeUrn = new Urn(BUSINESS_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + + Owner ownerWithLegacyTechnicalOwnership = new Owner(); + ownerWithLegacyTechnicalOwnership.setOwner(ownerUrn1); + ownerWithLegacyTechnicalOwnership.setType(OwnershipType.TECHNICAL_OWNER); + + assertTrue( + OwnerUtils.isOwnerEqual( + ownerWithLegacyTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithLegacyTechnicalOwnership, ownerUrn1, businessOwnershipTypeUrn)); + + Owner ownerWithNewTechnicalOwnership = new Owner(); + ownerWithLegacyTechnicalOwnership.setOwner(ownerUrn1); + ownerWithLegacyTechnicalOwnership.setTypeUrn(technicalOwnershipTypeUrn); + + assertTrue( + OwnerUtils.isOwnerEqual( + ownerWithNewTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithNewTechnicalOwnership, ownerUrn1, businessOwnershipTypeUrn)); + } +} diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 71baa8af99468a..9108f3009b4ba3 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -14,7 +14,7 @@ ext { dependencies { implementation project(':metadata-io') implementation project(':metadata-service:factories') - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:configuration') implementation project(':metadata-dao-impl:kafka-producer') implementation externalDependency.charle @@ -24,7 +24,7 @@ dependencies { exclude group: 'net.minidev', module: 'json-smart' exclude group: 'com.nimbusds', module: 'nimbus-jose-jwt' exclude group: 
"org.apache.htrace", module: "htrace-core4" - exclude group: "org.eclipse.jetty", module: "jetty-util" + exclude group: "org.eclipse.jetty" exclude group: "org.apache.hadoop.thirdparty", module: "hadoop-shaded-protobuf_3_7" exclude group: "com.charleskorn.kaml", module:"kaml" @@ -43,13 +43,16 @@ dependencies { implementation(externalDependency.jettison) { because("previous versions are vulnerable") } + implementation(externalDependency.guava) { + because("CVE-2023-2976") + } } // mock internal schema registry implementation externalDependency.kafkaAvroSerde implementation externalDependency.kafkaAvroSerializer - implementation "org.apache.kafka:kafka_2.12:$kafkaVersion" + implementation "org.apache.kafka:kafka_2.12:3.7.0" implementation externalDependency.slf4jApi compileOnly externalDependency.lombok @@ -66,9 +69,7 @@ dependencies { runtimeOnly externalDependency.mysqlConnector runtimeOnly externalDependency.postgresql - implementation(externalDependency.awsMskIamAuth) { - exclude group: 'software.amazon.awssdk', module: 'third-party-jackson-core' - } + implementation externalDependency.awsMskIamAuth annotationProcessor externalDependency.lombok annotationProcessor externalDependency.picocli diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java index eee27096e22388..126df9187bc232 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java @@ -7,12 +7,13 @@ import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; +import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; 
+import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; import java.util.List; import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.CommandLineRunner; import org.springframework.stereotype.Component; import picocli.CommandLine; @@ -51,18 +52,18 @@ private static final class Args { @Named("removeUnknownAspects") private RemoveUnknownAspects removeUnknownAspects; - @Inject - @Named("buildIndices") - private BuildIndices buildIndices; - - @Inject - @Named("cleanIndices") - private CleanIndices cleanIndices; - - @Inject + @Autowired(required = false) @Named("systemUpdate") private SystemUpdate systemUpdate; + @Autowired(required = false) + @Named("systemUpdateBlocking") + private SystemUpdateBlocking systemUpdateBlocking; + + @Autowired(required = false) + @Named("systemUpdateNonBlocking") + private SystemUpdateNonBlocking systemUpdateNonBlocking; + @Override public void run(String... cmdLineArgs) { _upgradeManager.register(noCodeUpgrade); @@ -70,9 +71,15 @@ public void run(String... 
cmdLineArgs) { _upgradeManager.register(restoreIndices); _upgradeManager.register(restoreBackup); _upgradeManager.register(removeUnknownAspects); - _upgradeManager.register(buildIndices); - _upgradeManager.register(cleanIndices); - _upgradeManager.register(systemUpdate); + if (systemUpdate != null) { + _upgradeManager.register(systemUpdate); + } + if (systemUpdateBlocking != null) { + _upgradeManager.register(systemUpdateBlocking); + } + if (systemUpdateNonBlocking != null) { + _upgradeManager.register(systemUpdateNonBlocking); + } final Args args = new Args(); new CommandLine(args).setCaseInsensitiveEnumValuesAllowed(true).parseArgs(cmdLineArgs); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 909ceeb8f3bab4..50847da07be73b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -1,5 +1,11 @@ package com.linkedin.datahub.upgrade; +import com.linkedin.gms.factory.auth.AuthorizerChainFactory; +import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory; +import com.linkedin.gms.factory.graphql.GraphQLEngineFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; +import com.linkedin.gms.factory.kafka.SimpleKafkaConsumerFactory; +import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -19,7 +25,15 @@ excludeFilters = { @ComponentScan.Filter( type = FilterType.ASSIGNABLE_TYPE, - classes = ScheduledAnalyticsFactory.class) + classes = { + ScheduledAnalyticsFactory.class, + AuthorizerChainFactory.class, + DataHubAuthorizerFactory.class, + 
SimpleKafkaConsumerFactory.class, + KafkaEventConsumerFactory.class, + InternalSchemaRegistryFactory.class, + GraphQLEngineFactory.class + }) }) public class UpgradeCliApplication { public static void main(String[] args) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index dd6c3fd1e44aa6..4be39ac3c4bfc1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -4,14 +4,16 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -27,9 +29,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(false); + entityClient.setWritable(false); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode off in GMS", e); context.report().addLine("Failed to turn write mode off in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 
8a0d374d6ee3e6..09713dc78ee279 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -4,13 +4,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +@Slf4j @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient entityClient; @Override public String id() { @@ -26,9 +28,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(true); + entityClient.setWritable(true); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode back on in GMS", e); context.report().addLine("Failed to turn write mode back on in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index abd144bf453ed8..a33722d7761cc4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -1,17 +1,28 @@ package com.linkedin.datahub.upgrade.config; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import 
com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.browsepaths.BackfillBrowsePathsV2; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 backfillBrowsePathsV2( - EntityService entityService, SearchService searchService) { - return new BackfillBrowsePathsV2(entityService, searchService); + public NonBlockingSystemUpgrade backfillBrowsePathsV2( + final OperationContext opContext, + EntityService entityService, + SearchService searchService, + @Value("${systemUpdate.browsePathsV2.enabled}") final boolean enabled, + @Value("${systemUpdate.browsePathsV2.reprocess.enabled}") final boolean reprocessEnabled, + @Value("${systemUpdate.browsePathsV2.batchSize}") final Integer batchSize) { + return new BackfillBrowsePathsV2( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java new file mode 100644 index 00000000000000..7226ec267dbbc5 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java @@ -0,0 +1,27 @@ +package com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.policyfields.BackfillPolicyFields; +import com.linkedin.metadata.entity.EntityService; +import 
com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +@Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) +public class BackfillPolicyFieldsConfig { + + @Bean + public BackfillPolicyFields backfillPolicyFields( + final OperationContext opContext, + EntityService entityService, + SearchService searchService, + @Value("${systemUpdate.policyFields.enabled}") final boolean enabled, + @Value("${systemUpdate.policyFields.reprocess.enabled}") final boolean reprocessEnabled, + @Value("${systemUpdate.policyFields.batchSize}") final Integer batchSize) { + return new BackfillPolicyFields( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index 1e9298bc60612d..3510fa513b3b9c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -1,26 +1,33 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import 
com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.BlockingSystemUpdateCondition.class) public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices( + public BlockingSystemUpgrade buildIndices( final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, final EntitySearchService entitySearchService, final GraphService graphService, final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao, + final EntityRegistry entityRegistry) { return new BuildIndices( systemMetadataService, @@ -28,6 +35,8 @@ public BuildIndices buildIndices( entitySearchService, graphService, baseElasticSearchComponents, - configurationProvider); + configurationProvider, + aspectDao, + entityRegistry); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 5bd7244a92e45a..4f54b01459625d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import 
com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; @@ -8,12 +9,14 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class CleanIndicesConfig { - @Bean(name = "cleanIndices") - public CleanIndices cleanIndices( + @Bean + public NonBlockingSystemUpgrade cleanIndices( final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, final EntitySearchService entitySearchService, diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 24bcec5852b4fc..5ba5c8a90fd4ac 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -7,13 +7,16 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeCleanupConfig { @@ -26,6 +29,7 @@ public class NoCodeCleanupConfig { "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN }) + 
@ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -34,4 +38,12 @@ public NoCodeCleanupUpgrade createInstance() { final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); return new NoCodeCleanupUpgrade(ebeanServer, graphClient, searchClient, indexConvention); } + + @Bean(name = "noCodeCleanup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeCleanupUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeCleanupUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index 68009d7ed1718a..741aeece1cf62f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -1,32 +1,43 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import 
org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class NoCodeUpgradeConfig { @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + @DependsOn({"ebeanServer", "entityService", "systemEntityClient", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); } + + @Bean(name = "noCodeUpgrade") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java new file mode 100644 index 00000000000000..0281ff4f4169b5 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import 
com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL; +import com.linkedin.metadata.entity.EntityService; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +@Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) +public class ReindexDataJobViaNodesCLLConfig { + + @Bean + public NonBlockingSystemUpgrade reindexDataJobViaNodesCLL( + EntityService entityService, + @Value("${systemUpdate.dataJobNodeCLL.enabled}") final boolean enabled, + @Value("${systemUpdate.dataJobNodeCLL.batchSize}") final Integer batchSize) { + return new ReindexDataJobViaNodesCLL(entityService, enabled, batchSize); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index 0b46133209382b..5bf1241e21305b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -8,7 +8,7 @@ @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") - public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { + public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { return new RemoveUnknownAspects(entityService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 743e4ffe84b0e4..ec6e5a4a8f04d1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -1,19 +1,22 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class RestoreBackupConfig { @Autowired ApplicationContext applicationContext; @@ -22,17 +25,17 @@ public class RestoreBackupConfig { @DependsOn({ "ebeanServer", "entityService", - "systemRestliEntityClient", + "systemEntityClient", "graphService", "searchService", "entityRegistry" }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = - applicationContext.getBean(SystemRestliEntityClient.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final GraphService graphClient = 
applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); @@ -40,4 +43,12 @@ public RestoreBackup createInstance() { return new RestoreBackup( ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); } + + @Bean(name = "restoreBackup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreBackup createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreBackup(null, null, null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index d258c4a4d1a529..008bdf5cfac388 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -3,32 +3,41 @@ import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; +@Slf4j @Configuration public class RestoreIndicesConfig { @Autowired 
ApplicationContext applicationContext; @Bean(name = "restoreIndices") - @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @DependsOn({"ebeanServer", "entityService", "searchService", "graphService"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); - final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices( - ebeanServer, entityService, entityRegistry, entitySearchService, graphService); + return new RestoreIndices(ebeanServer, entityService, entitySearchService, graphService); + } + + @Bean(name = "restoreIndices") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreIndices createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreIndices(null, null, null, null); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java new file mode 100644 index 00000000000000..0d65af742a5925 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java @@ -0,0 +1,48 @@ +package com.linkedin.datahub.upgrade.config; + +import java.util.Objects; +import java.util.Set; +import org.springframework.boot.ApplicationArguments; +import 
org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.type.AnnotatedTypeMetadata; + +public class SystemUpdateCondition implements Condition { + public static final String SYSTEM_UPDATE_ARG = "SystemUpdate"; + public static final String BLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "Blocking"; + public static final String NONBLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "NonBlocking"; + public static final Set SYSTEM_UPDATE_ARGS = + Set.of(SYSTEM_UPDATE_ARG, BLOCKING_SYSTEM_UPDATE_ARG, NONBLOCKING_SYSTEM_UPDATE_ARG); + + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context.getBeanFactory().getBean(ApplicationArguments.class).getNonOptionArgs().stream() + .filter(Objects::nonNull) + .anyMatch(SYSTEM_UPDATE_ARGS::contains); + } + + public static class BlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch(arg -> SYSTEM_UPDATE_ARG.equals(arg) || BLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } + } + + public static class NonBlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch( + arg -> SYSTEM_UPDATE_ARG.equals(arg) || NONBLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 3b63d81486eb4b..bea38b616f86fb 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -1,18 +1,25 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.gms.factory.common.TopicConventionFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.TopicConvention; +import java.util.List; +import javax.annotation.PostConstruct; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.KafkaProducer; @@ -20,25 +27,37 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import 
org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; @Slf4j @Configuration +@Conditional(SystemUpdateCondition.class) public class SystemUpdateConfig { + @Bean(name = "systemUpdate") public SystemUpdate systemUpdate( - final BuildIndices buildIndices, - final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, - @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + final List blockingSystemUpgrades, + final List nonBlockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdate(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } - String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate( - buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + @Bean(name = "systemUpdateBlocking") + public SystemUpdateBlocking systemUpdateBlocking( + final List blockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdateBlocking(blockingSystemUpgrades, List.of(), dataHubStartupStep); + } + + @Bean(name = "systemUpdateNonBlocking") + public SystemUpdateNonBlocking systemUpdateNonBlocking( + final List nonBlockingSystemUpgrades) { + return new SystemUpdateNonBlocking(List.of(), nonBlockingSystemUpgrades, null); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -49,6 +68,15 @@ public String getRevision() { return revision; } + @Bean + public DataHubStartupStep dataHubStartupStep( + 
@Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision) { + return new DataHubStartupStep( + kafkaEventProducer, String.format("%s-%s", gitVersion.getVersion(), revision)); + } + @Autowired @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; @@ -67,4 +95,33 @@ protected KafkaEventProducer duheKafkaEventProducer( duheSchemaRegistryConfig, kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } + + /** + * The ReindexDataJobViaNodesCLLConfig step requires publishing to MCL. Overriding the default + * producer with this special producer which doesn't require an active registry. + * + *

Use when INTERNAL registry and is SYSTEM_UPDATE + * + *

This forces this producer into the EntityService + */ + @Primary + @Bean(name = "kafkaEventProducer") + @ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) + protected KafkaEventProducer kafkaEventProducer( + @Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) { + return kafkaEventProducer; + } + + @Configuration + public static class SystemUpdateSetup { + @Autowired private EntityService entityService; + @Autowired private EntitySearchService entitySearchService; + + @PostConstruct + protected void postConstruct() { + entitySearchService.postConstruct(entityService); + } + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 6cc94fbed5bf31..57e16eb72d0250 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -8,49 +8,33 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import lombok.Getter; +import lombok.experimental.Accessors; +@Getter +@Accessors(fluent = true) public class DefaultUpgradeContext implements UpgradeContext { - private final Upgrade _upgrade; - private final UpgradeReport _report; - private final List _previousStepResults; - private final List _args; - private final Map> _parsedArgs; + private final Upgrade upgrade; + private final UpgradeReport report; + private final List previousStepResults; + private final List args; + private final Map> parsedArgs; DefaultUpgradeContext( Upgrade upgrade, UpgradeReport report, List previousStepResults, List args) { - _upgrade = upgrade; - _report = report; - _previousStepResults = previousStepResults; - _args = args; - _parsedArgs = UpgradeUtils.parseArgs(args); - } - - @Override - public Upgrade 
upgrade() { - return _upgrade; + this.upgrade = upgrade; + this.report = report; + this.previousStepResults = previousStepResults; + this.args = args; + this.parsedArgs = UpgradeUtils.parseArgs(args); } @Override public List stepResults() { - return _previousStepResults; - } - - @Override - public UpgradeReport report() { - return _report; - } - - @Override - public List args() { - return _args; - } - - @Override - public Map> parsedArgs() { - return _parsedArgs; + return previousStepResults; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index 623c8a71e861d8..bddf53a2749054 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -16,7 +16,9 @@ import java.util.List; import java.util.Map; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class DefaultUpgradeManager implements UpgradeManager { private final Map _upgrades = new HashMap<>(); @@ -137,6 +139,7 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { + log.error("Caught exception during attempt {} of Step with id {}", i, step.id(), e); context .report() .addLine( diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index ac56e5e91c72be..9f41daf02d2093 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -10,6 +10,7 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import 
com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; @@ -170,7 +171,7 @@ public Function executable() { // Emit a browse path aspect. final BrowsePaths browsePaths; try { - browsePaths = _entityService.buildDefaultBrowsePath(urn); + browsePaths = DefaultAspectsUtil.buildDefaultBrowsePath(urn, _entityService); final AuditStamp browsePathsStamp = new AuditStamp(); browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index 6753d309b9f501..1524a015e414e4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -6,13 +6,14 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSQualificationStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; public class NoCodeUpgrade implements Upgrade { @@ -26,12 +27,17 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. 
public NoCodeUpgrade( - final Database server, - final EntityService entityService, + @Nullable final Database server, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); - _cleanupSteps = buildCleanupSteps(); + final SystemEntityClient entityClient) { + if (server != null) { + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override @@ -55,9 +61,9 @@ private List buildCleanupSteps() { private List buildUpgradeSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { + final SystemEntityClient entityClient) { final List steps = new ArrayList<>(); steps.add(new RemoveAspectV2TableStep(server)); steps.add(new GMSQualificationStep(ImmutableMap.of("noCode", "true"))); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index 8a267be6ad8086..6d3125423b4433 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -9,6 +9,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; import org.opensearch.client.RestHighLevelClient; public class NoCodeCleanupUpgrade implements Upgrade { @@ -18,12 +19,17 @@ public class NoCodeCleanupUpgrade implements Upgrade { // Upgrade requires the Database. 
public NoCodeCleanupUpgrade( - final Database server, + @Nullable final Database server, final GraphService graphClient, final RestHighLevelClient searchClient, final IndexConvention indexConvention) { - _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index 7e55dcddc639f4..74d97767d1c394 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -17,7 +17,7 @@ public class RemoveClientIdAspectStep implements UpgradeStep { private static final String INVALID_CLIENT_ID_ASPECT = "clientId"; - private final EntityService _entityService; + private final EntityService _entityService; @Override public String id() { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index dc95b7605ef88f..3ea449051b3558 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -12,7 +12,7 @@ public class RemoveUnknownAspects implements Upgrade { private final List _steps; - public RemoveUnknownAspects(final EntityService entityService) { + public 
RemoveUnknownAspects(final EntityService entityService) { _steps = buildSteps(entityService); } @@ -26,7 +26,7 @@ public List steps() { return _steps; } - private List buildSteps(final EntityService entityService) { + private List buildSteps(final EntityService entityService) { final List steps = new ArrayList<>(); steps.add(new RemoveClientIdAspectStep(entityService)); return steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index b11abb2d6bc23a..bcaeaa34e8936d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -8,7 +8,7 @@ import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; import com.linkedin.datahub.upgrade.common.steps.GMSDisableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -16,20 +16,26 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final Database server, - final EntityService entityService, + @Nullable final Database server, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, + final SystemEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = - buildSteps(server, entityService, entityRegistry, entityClient, graphClient, 
searchClient); + if (server != null) { + _steps = + buildSteps( + server, entityService, entityRegistry, entityClient, graphClient, searchClient); + } else { + _steps = List.of(); + } } @Override @@ -44,9 +50,9 @@ public List steps() { private List buildSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, + final SystemEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { final List steps = new ArrayList<>(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 5c4567c856d0ed..147acc9c1e0f33 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -39,7 +39,7 @@ public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; private static final int DEFAULT_THREAD_POOL = 4; - private final EntityService _entityService; + private final EntityService _entityService; private final EntityRegistry _entityRegistry; private final Map>>> _backupReaders; @@ -47,7 +47,7 @@ public class RestoreStorageStep implements UpgradeStep { private final ExecutorService _gmsThreadPool; public RestoreStorageStep( - final EntityService entityService, final EntityRegistry entityRegistry) { + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -178,8 +178,9 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; 
try { aspectRecord = - EntityUtils.toAspectRecord( - entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toSystemAspect(aspect.toEntityAspect(), _entityService) + .get() + .getRecordTemplate(); } catch (Exception e) { context .report() diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 212f0da9f592d0..c6839c0e63f055 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -9,6 +9,7 @@ * Strings */ public interface BackupReader { + String getName(); @Nonnull diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 8bb3b0073710a3..9bc42e23a99746 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -8,11 +8,11 @@ import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; @@ -23,18 +23,22 @@ public class RestoreIndices implements Upgrade { public static final String WRITER_POOL_SIZE = "WRITER_POOL_SIZE"; public static final String URN_ARG_NAME = 
"urn"; public static final String URN_LIKE_ARG_NAME = "urnLike"; + public static final String URN_BASED_PAGINATION_ARG_NAME = "urnBasedPagination"; public static final String STARTING_OFFSET_ARG_NAME = "startingOffset"; private final List _steps; public RestoreIndices( - final Database server, - final EntityService entityService, - final EntityRegistry entityRegistry, + @Nullable final Database server, + final EntityService entityService, final EntitySearchService entitySearchService, final GraphService graphService) { - _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + if (server != null) { + _steps = buildSteps(server, entityService, entitySearchService, graphService); + } else { + _steps = List.of(); + } } @Override @@ -49,14 +53,13 @@ public List steps() { private List buildSteps( final Database server, - final EntityService entityService, - final EntityRegistry entityRegistry, + final EntityService entityService, final EntitySearchService entitySearchService, final GraphService graphService) { final List steps = new ArrayList<>(); steps.add(new ClearSearchServiceStep(entitySearchService, false)); steps.add(new ClearGraphServiceStep(graphService, false)); - steps.add(new SendMAEStep(server, entityService, entityRegistry)); + steps.add(new SendMAEStep(server, entityService)); return steps; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index ce59cf2edb84e9..83bc96ad449d1b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -10,7 +10,6 @@ import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import 
com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; -import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; import java.util.ArrayList; @@ -23,7 +22,9 @@ import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -31,9 +32,10 @@ public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_STARTING_OFFSET = 0; private static final int DEFAULT_THREADS = 1; + private static final boolean DEFAULT_URN_BASED_PAGINATION = false; private final Database _server; - private final EntityService _entityService; + private final EntityService _entityService; public class KafkaJob implements Callable { UpgradeContext context; @@ -46,14 +48,11 @@ public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { @Override public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); + return _entityService.streamRestoreIndices(args, context.report()::addLine).findFirst().get(); } } - public SendMAEStep( - final Database server, - final EntityService entityService, - final EntityRegistry entityRegistry) { + public SendMAEStep(final Database server, final EntityService entityService) { _server = server; _entityService = entityService; } @@ -76,7 +75,7 @@ private List iterateFutures(List iterateFutures(List executable() { List> futures = new ArrayList<>(); startTime = System.currentTimeMillis(); - while (start < rowCount) { - args = args.clone(); - args.start = start; - futures.add(executor.submit(new KafkaJob(context, args))); - start = start + args.batchSize; - } - while (futures.size() > 0) { - List tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult : tmpResults) { - reportStats(context, finalJobResult, 
tmpResult, rowCount, startTime); + if (args.urnBasedPagination) { + RestoreIndicesResult previousResult = null; + int rowsProcessed = 1; + while (rowsProcessed > 0) { + args = args.clone(); + if (previousResult != null) { + args.lastUrn = previousResult.lastUrn; + args.lastAspect = previousResult.lastAspect; + } + args.start = start; + context + .report() + .addLine( + String.format( + "Getting next batch of urns + aspects, starting with %s - %s", + args.lastUrn, args.lastAspect)); + Future future = executor.submit(new KafkaJob(context, args)); + try { + RestoreIndicesResult result = future.get(); + reportStats(context, finalJobResult, result, rowCount, startTime); + previousResult = result; + rowsProcessed = result.rowsMigrated + result.ignored; + context.report().addLine(String.format("Rows processed this loop %d", rowsProcessed)); + start += args.batchSize; + } catch (InterruptedException | ExecutionException e) { + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + } + } else { + while (start < rowCount) { + args = args.clone(); + args.start = start; + futures.add(executor.submit(new KafkaJob(context, args))); + start = start + args.batchSize; + } + while (futures.size() > 0) { + List tmpResults = iterateFutures(futures); + for (RestoreIndicesResult tmpResult : tmpResults) { + reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + } } } + executor.shutdown(); if (finalJobResult.rowsMigrated != rowCount) { float percentFailed = 0.0f; @@ -233,6 +283,15 @@ private int getThreadCount(final Map> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } + private boolean getUrnBasedPagination(final Map> parsedArgs) { + boolean urnBasedPagination = DEFAULT_URN_BASED_PAGINATION; + if (containsKey(parsedArgs, RestoreIndices.URN_BASED_PAGINATION_ARG_NAME)) { + urnBasedPagination = + Boolean.parseBoolean(parsedArgs.get(RestoreIndices.URN_BASED_PAGINATION_ARG_NAME).get()); + } + return 
urnBasedPagination; + } + private int getInt( final Map> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java new file mode 100644 index 00000000000000..4fae5b2239d11e --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface BlockingSystemUpgrade extends Upgrade {} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java new file mode 100644 index 00000000000000..fd83f1544a0982 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface NonBlockingSystemUpgrade extends Upgrade {} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index aba751bff8177d..ad1c6c98fa3fd1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -3,53 +3,48 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeCleanupStep; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import 
com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; -import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import java.util.LinkedList; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import javax.annotation.Nullable; +import lombok.Getter; +import lombok.NonNull; +import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; +@Getter @Slf4j +@Accessors(fluent = true) public class SystemUpdate implements Upgrade { - private final List _preStartupUpgrades; - private final List _postStartupUpgrades; - private final List _steps; + private final List steps; + private final List cleanupSteps; public SystemUpdate( - final BuildIndices buildIndicesJob, - final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, - final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } + @NonNull final List blockingSystemUpgrades, + @NonNull final List nonBlockingSystemUpgrades, + @Nullable final DataHubStartupStep dataHubStartupStep) { - @Override - public String id() { - return "SystemUpdate"; - } + steps = new LinkedList<>(); + cleanupSteps = new LinkedList<>(); - @Override - public List steps() { - return Stream.concat( - Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); + // blocking upgrades + steps.addAll(blockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + blockingSystemUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()).toList()); + + // emit system update message 
if blocking upgrade(s) present + if (dataHubStartupStep != null && !blockingSystemUpgrades.isEmpty()) { + steps.add(dataHubStartupStep); + } + + // add non-blocking upgrades last + steps.addAll(nonBlockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + nonBlockingSystemUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()).toList()); } @Override - public List cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); + public String id() { + return getClass().getSimpleName(); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java new file mode 100644 index 00000000000000..32841149c467b3 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateBlocking extends SystemUpdate { + + public SystemUpdateBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java new file mode 100644 index 00000000000000..3309babc1f6cf2 --- /dev/null +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateNonBlocking extends SystemUpdate { + + public SystemUpdateNonBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java new file mode 100644 index 00000000000000..16c039e2a64abd --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java @@ -0,0 +1,41 @@ +package com.linkedin.datahub.upgrade.system.browsepaths; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; + +public class BackfillBrowsePathsV2 implements NonBlockingSystemUpgrade { + + private final List _steps; + + public BackfillBrowsePathsV2( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new BackfillBrowsePathsV2Step( + opContext, entityService, searchService, reprocessEnabled, batchSize)); + } else { + _steps = 
ImmutableList.of(); + } + } + + @Override + public String id() { + return "BackfillBrowsePathsV2"; + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java similarity index 74% rename from datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java rename to datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java index 610d9069337a52..30674ecc3d00eb 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java @@ -1,4 +1,4 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; +package com.linkedin.datahub.upgrade.system.browsepaths; import static com.linkedin.metadata.Constants.*; @@ -15,8 +15,9 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; +import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -29,6 +30,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import io.datahubproject.metadata.context.OperationContext; import java.util.Set; import java.util.function.Function; import 
lombok.extern.slf4j.Slf4j; @@ -36,9 +38,8 @@ @Slf4j public class BackfillBrowsePathsV2Step implements UpgradeStep { - public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = - "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + private static final String UPGRADE_ID = "BackfillBrowsePathsV2Step"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; private static final Set ENTITY_TYPES_TO_MIGRATE = @@ -52,14 +53,25 @@ public class BackfillBrowsePathsV2Step implements UpgradeStep { Constants.ML_MODEL_GROUP_ENTITY_NAME, Constants.ML_FEATURE_TABLE_ENTITY_NAME, Constants.ML_FEATURE_ENTITY_NAME); - private static final Integer BATCH_SIZE = 5000; - private final EntityService _entityService; - private final SearchService _searchService; - - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { - _searchService = searchService; - _entityService = entityService; + private final OperationContext opContext; + private final EntityService entityService; + private final SearchService searchService; + + private final boolean reprocessEnabled; + private final Integer batchSize; + + public BackfillBrowsePathsV2Step( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean reprocessEnabled, + Integer batchSize) { + this.opContext = opContext; + this.searchService = searchService; + this.entityService = entityService; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; } @Override @@ -77,11 +89,14 @@ public Function executable() { log.info( String.format( "Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, entityType)); + migratedCount, migratedCount + batchSize, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); - 
migratedCount += BATCH_SIZE; + migratedCount += batchSize; } while (scrollId != null); } + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } @@ -90,27 +105,29 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; - if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + if (reprocessEnabled) { filter = backfillDefaultBrowsePathsV2Filter(); } else { filter = backfillBrowsePathsV2Filter(); } final ScrollResult scrollResult = - _searchService.scrollAcrossEntities( + searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), ImmutableList.of(entityType), "*", filter, null, scrollId, null, - BATCH_SIZE, - new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipHighlighting(true) - .setSkipAggregates(true)); + batchSize); + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -181,7 +198,8 @@ private Filter backfillDefaultBrowsePathsV2Filter() { } private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { - BrowsePathsV2 browsePathsV2 = _entityService.buildDefaultBrowsePathV2(urn, true); + BrowsePathsV2 browsePathsV2 = + DefaultAspectsUtil.buildDefaultBrowsePathV2(urn, true, entityService); log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); @@ -191,12 +209,12 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setSystemMetadata( new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); 
proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal(proposal, auditStamp, true); + entityService.ingestProposal(proposal, auditStamp, true); } @Override public String id() { - return "BackfillBrowsePathsV2Step"; + return UPGRADE_ID; } /** @@ -209,7 +227,23 @@ public boolean isOptional() { } @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variables REPROCESS_DEFAULT_BROWSE_PATHS_V2 & BACKFILL_BROWSE_PATHS_V2 to determine whether to + * skip. + */ public boolean skip(UpgradeContext context) { - return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); + boolean envEnabled = Boolean.parseBoolean(System.getenv("BACKFILL_BROWSE_PATHS_V2")); + + if (reprocessEnabled && envEnabled) { + return false; + } + + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + if (previouslyRun) { + log.info("{} was already run. 
Skipping.", id()); + } + return (previouslyRun || !envEnabled); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index eb76a72fba71af..fea0479876a2e9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; @@ -17,7 +19,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public class BuildIndices implements Upgrade { +public class BuildIndices implements BlockingSystemUpgrade { private final List _steps; @@ -28,7 +30,9 @@ public BuildIndices( final GraphService graphService, final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final ConfigurationProvider configurationProvider, 
+ final AspectDao aspectDao, + final EntityRegistry entityRegistry) { List indexedServices = Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) @@ -36,7 +40,13 @@ public BuildIndices( .map(service -> (ElasticSearchIndexed) service) .collect(Collectors.toList()); - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); + _steps = + buildSteps( + indexedServices, + baseElasticSearchComponents, + configurationProvider, + aspectDao, + entityRegistry); } @Override @@ -53,13 +63,19 @@ private List buildSteps( final List indexedServices, final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao, + final EntityRegistry entityRegistry) { final List steps = new ArrayList<>(); // Disable ES write mode/change refresh rate and clone indices steps.add( new BuildIndicesPreStep( - baseElasticSearchComponents, indexedServices, configurationProvider)); + baseElasticSearchComponents, + indexedServices, + configurationProvider, + aspectDao, + entityRegistry)); // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService steps.add(new BuildIndicesStep(indexedServices)); // Reset configuration (and delete clones? Or just do this regularly? 
Or delete clone in diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index ad68386622b216..e316481e2b07e6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -1,7 +1,7 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.CleanIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; @@ -16,7 +16,7 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class CleanIndices implements Upgrade { +public class CleanIndices implements NonBlockingSystemUpgrade { private final List _steps; public CleanIndices( diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index c25888be07f899..0695dbe4b1acb0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -2,8 +2,13 @@ import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; +import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; +import static 
com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME; +import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableMap; +import com.linkedin.common.Status; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -11,11 +16,16 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; @@ -31,6 +41,8 @@ public class BuildIndicesPreStep implements UpgradeStep { private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents _esComponents; private final List _services; private final ConfigurationProvider _configurationProvider; + private final AspectDao _aspectDao; + private final EntityRegistry _entityRegistry; @Override public String id() { @@ -46,9 +58,17 @@ public int retryCount() { public Function executable() { return (context) -> { try { + final List reindexConfigs; + if (_configurationProvider.getStructuredProperties().isSystemUpdateEnabled()) { + reindexConfigs = + getAllReindexConfigs(_services, getActiveStructuredPropertiesDefinitions(_aspectDao)); + } else { + reindexConfigs = getAllReindexConfigs(_services); + } + // Get indices to 
update List indexConfigs = - getAllReindexConfigs(_services).stream() + reindexConfigs.stream() .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); @@ -133,4 +153,31 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc return ack; } + + private static Set getActiveStructuredPropertiesDefinitions( + AspectDao aspectDao) { + Set removedStructuredPropertyUrns = + aspectDao + .streamAspects(STRUCTURED_PROPERTY_ENTITY_NAME, STATUS_ASPECT_NAME) + .map( + entityAspect -> + Pair.of( + entityAspect.getUrn(), + RecordUtils.toRecordTemplate(Status.class, entityAspect.getMetadata()))) + .filter(status -> status.getSecond().isRemoved()) + .map(Pair::getFirst) + .collect(Collectors.toSet()); + + return aspectDao + .streamAspects(STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME) + .map( + entityAspect -> + Pair.of( + entityAspect.getUrn(), + RecordUtils.toRecordTemplate( + StructuredPropertyDefinition.class, entityAspect.getMetadata()))) + .filter(definition -> !removedStructuredPropertyUrns.contains(definition.getKey())) + .map(Pair::getSecond) + .collect(Collectors.toSet()); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index b4a506c3f5c63c..d2b5965a3109ce 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -33,7 +33,7 @@ public Function executable() { DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); - log.info("Initiating startup for version: {}", _version); + log.info("System 
Update finished for version: {}", _version); } catch (Exception e) { log.error("DataHubStartupStep failed.", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index b3de7c503fb3e5..52b34200991c35 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -2,8 +2,10 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Set; import lombok.extern.slf4j.Slf4j; @@ -39,6 +41,23 @@ public static List getAllReindexConfigs( return reindexConfigs; } + public static List getAllReindexConfigs( + List elasticSearchIndexedList, + Collection structuredProperties) + throws IOException { + // Avoid locking & reprocessing + List reindexConfigs = new ArrayList<>(_reindexConfigs); + if (reindexConfigs.isEmpty()) { + for (ElasticSearchIndexed elasticSearchIndexed : elasticSearchIndexedList) { + reindexConfigs.addAll( + elasticSearchIndexed.buildReindexConfigsWithAllStructProps(structuredProperties)); + } + _reindexConfigs = new ArrayList<>(reindexConfigs); + } + + return reindexConfigs; + } + public static boolean validateWriteBlock( RestHighLevelClient esClient, String indexName, boolean expectedState) throws IOException, InterruptedException { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java deleted file mode 100644 index 03f0b0b7f2ec2f..00000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; - -import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.Upgrade; -import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.search.SearchService; -import java.util.List; - -public class BackfillBrowsePathsV2 implements Upgrade { - - private final List _steps; - - public BackfillBrowsePathsV2(EntityService entityService, SearchService searchService) { - _steps = ImmutableList.of(new BackfillBrowsePathsV2Step(entityService, searchService)); - } - - @Override - public String id() { - return "BackfillBrowsePathsV2"; - } - - @Override - public List steps() { - return _steps; - } -} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java new file mode 100644 index 00000000000000..ca568e91928951 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.upgrade.system.policyfields; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; + +public class BackfillPolicyFields implements NonBlockingSystemUpgrade { + private final 
List _steps; + + public BackfillPolicyFields( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new BackfillPolicyFieldsStep( + opContext, entityService, searchService, reprocessEnabled, batchSize)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return "BackfillPolicyFields"; + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java new file mode 100644 index 00000000000000..733a871f955959 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java @@ -0,0 +1,247 @@ +package com.linkedin.datahub.upgrade.system.policyfields; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.entity.EntityResponse; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import 
com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.mxe.SystemMetadata; +import com.linkedin.policy.DataHubPolicyInfo; +import io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; + +/** + * This bootstrap step is responsible for upgrading DataHub policy documents with new searchable + * fields in ES + */ +@Slf4j +public class BackfillPolicyFieldsStep implements UpgradeStep { + private static final String UPGRADE_ID = "BackfillPolicyFieldsStep"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private final OperationContext opContext; + private final boolean reprocessEnabled; + private final Integer batchSize; + private final EntityService entityService; + private final SearchService _searchService; + + public BackfillPolicyFieldsStep( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean reprocessEnabled, + Integer batchSize) { + this.opContext = opContext; + this.entityService = entityService; + this._searchService = searchService; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; + } + + @Override + public String id() { + return UPGRADE_ID; + } + + @Override + public Function executable() { + return (context) -> { + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + 
.setTime(System.currentTimeMillis()); + + String scrollId = null; + int migratedCount = 0; + do { + log.info("Upgrading batch of policies {}-{}", migratedCount, migratedCount + batchSize); + scrollId = backfillPolicies(auditStamp, scrollId); + migratedCount += batchSize; + } while (scrollId != null); + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return true; + } + + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the + * reprocessEnabled flag (REPROCESS_DEFAULT_POLICY_FIELDS) to determine whether to + * skip. + */ + @Override + public boolean skip(UpgradeContext context) { + + if (reprocessEnabled) { + return false; + } + + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + if (previouslyRun) { + log.info("{} was already run. 
Skipping.", id()); + } + return previouslyRun; + } + + private String backfillPolicies(AuditStamp auditStamp, String scrollId) { + + final Filter filter = backfillPolicyFieldFilter(); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), + ImmutableList.of(Constants.POLICY_ENTITY_NAME), + "*", + filter, + null, + scrollId, + null, + batchSize); + + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().isEmpty()) { + return null; + } + + List> futures = new LinkedList<>(); + for (SearchEntity searchEntity : scrollResult.getEntities()) { + try { + ingestPolicyFields(searchEntity.getEntity(), auditStamp).ifPresent(futures::add); + } catch (Exception e) { + // don't stop the whole step because of one bad urn or one bad ingestion + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); + } + } + + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + return scrollResult.getScrollId(); + } + + private Filter backfillPolicyFieldFilter() { + // Condition: Does not have at least 1 of: `privileges`, `editable`, `state` or `type` + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + + conjunctiveCriterionArray.add(getCriterionForMissingField("privilege")); + conjunctiveCriterionArray.add(getCriterionForMissingField("editable")); + conjunctiveCriterionArray.add(getCriterionForMissingField("state")); + conjunctiveCriterionArray.add(getCriterionForMissingField("type")); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private Optional> ingestPolicyFields(Urn urn, AuditStamp auditStamp) { + EntityResponse entityResponse = null; + try { + 
entityResponse = + entityService.getEntityV2( + urn.getEntityType(), urn, Collections.singleton(DATAHUB_POLICY_INFO_ASPECT_NAME)); + } catch (URISyntaxException e) { + log.error( + String.format( + "Error getting DataHub Policy Info for entity with urn %s while restating policy information", + urn), + e); + } + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) { + final DataMap dataMap = + entityResponse.getAspects().get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data(); + final DataHubPolicyInfo infoAspect = new DataHubPolicyInfo(dataMap); + + log.debug("Restating policy information for urn {} with value {}", urn, infoAspect); + return Optional.of( + entityService + .alwaysProduceMCLAsync( + urn, + urn.getEntityType(), + DATAHUB_POLICY_INFO_ASPECT_NAME, + entityService + .getEntityRegistry() + .getAspectSpecs() + .get(DATAHUB_POLICY_INFO_ASPECT_NAME), + null, + infoAspect, + null, + new SystemMetadata() + .setRunId(DEFAULT_RUN_ID) + .setLastObserved(System.currentTimeMillis()), + auditStamp, + ChangeType.RESTATE) + .getFirst()); + } + + return Optional.empty(); + } + + @NotNull + private static ConjunctiveCriterion getCriterionForMissingField(String field) { + final Criterion missingPrivilegesField = new Criterion(); + missingPrivilegesField.setCondition(Condition.IS_NULL); + missingPrivilegesField.setField(field); + + final CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(missingPrivilegesField); + final ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + return conjunctiveCriterion; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java new file mode 100644 index 00000000000000..c997aa15df9899 --- /dev/null +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java @@ -0,0 +1,37 @@ +package com.linkedin.datahub.upgrade.system.vianodes; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import java.util.List; +import lombok.extern.slf4j.Slf4j; + +/** + * A job that reindexes all datajob inputoutput aspects as part of the via node upgrade. This is + * required to index column-level lineage correctly using via nodes. + */ +@Slf4j +public class ReindexDataJobViaNodesCLL implements NonBlockingSystemUpgrade { + + private final List _steps; + + public ReindexDataJobViaNodesCLL( + EntityService entityService, boolean enabled, Integer batchSize) { + if (enabled) { + _steps = ImmutableList.of(new ReindexDataJobViaNodesCLLStep(entityService, batchSize)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return this.getClass().getName(); + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java new file mode 100644 index 00000000000000..6aa28879dfd1e5 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java @@ -0,0 +1,86 @@ +package com.linkedin.datahub.upgrade.system.vianodes; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import 
com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ReindexDataJobViaNodesCLLStep implements UpgradeStep { + + public static final String UPGRADE_ID = "via-node-cll-reindex-datajob-v2"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private final EntityService entityService; + private final Integer batchSize; + + public ReindexDataJobViaNodesCLLStep(EntityService entityService, Integer batchSize) { + this.entityService = entityService; + this.batchSize = batchSize; + } + + @Override + public Function executable() { + return (context) -> { + RestoreIndicesArgs args = + new RestoreIndicesArgs() + .aspectName(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .urnLike("urn:li:" + DATA_JOB_ENTITY_NAME + ":%") + .batchSize(batchSize); + + entityService + .streamRestoreIndices(args, x -> context.report().addLine((String) x)) + .forEach( + result -> { + context.report().addLine("Rows migrated: " + result.rowsMigrated); + context.report().addLine("Rows ignored: " + result.ignored); + }); + + BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); + context.report().addLine("State updated: " + UPGRADE_ID_URN); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + @Override + public String id() { + return UPGRADE_ID; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return false; + } + + @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT to determine whether to skip. 
+ */ + public boolean skip(UpgradeContext context) { + boolean previouslyRun = + entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + boolean envFlagRecommendsSkip = + Boolean.parseBoolean(System.getenv("SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT")); + if (previouslyRun) { + log.info("{} was already run. Skipping.", id()); + } + if (envFlagRecommendsSkip) { + log.info("Environment variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT is set to true. Skipping."); + } + return (previouslyRun || envFlagRecommendsSkip); + } +} diff --git a/datahub-upgrade/src/main/resources/application.properties b/datahub-upgrade/src/main/resources/application.properties new file mode 100644 index 00000000000000..b884c92f74bd48 --- /dev/null +++ b/datahub-upgrade/src/main/resources/application.properties @@ -0,0 +1,5 @@ +management.health.elasticsearch.enabled=false +management.health.neo4j.enabled=false +ingestion.enabled=false +spring.main.allow-bean-definition-overriding=true +entityClient.impl=restli diff --git a/datahub-upgrade/src/main/resources/logback.xml b/datahub-upgrade/src/main/resources/logback.xml index b934e4aa16fc08..3803dc67468466 100644 --- a/datahub-upgrade/src/main/resources/logback.xml +++ b/datahub-upgrade/src/main/resources/logback.xml @@ -10,6 +10,7 @@ scanned from multiple locations + [ignore_throttled] parameter is deprecated because frozen indices have been deprecated @@ -32,6 +33,7 @@ scanned from multiple locations + [ignore_throttled] parameter is deprecated because frozen indices have been deprecated diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index 83b8e028727cea..ed09a4a5aec2b9 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ 
b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -4,6 +4,8 @@ import static org.testng.AssertJUnit.assertNotNull; import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import com.linkedin.metadata.entity.EntityServiceImpl; import java.util.List; import java.util.Map; import java.util.Optional; @@ -19,19 +21,37 @@ classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, properties = { "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic", + "METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=test_mcl_versioned_topic" + }, + args = {"-u", "SystemUpdate"}) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { @Autowired @Named("systemUpdate") private SystemUpdate systemUpdate; + @Autowired + @Named("kafkaEventProducer") + private KafkaEventProducer kafkaEventProducer; + + @Autowired + @Named("duheKafkaEventProducer") + private KafkaEventProducer duheKafkaEventProducer; + + @Autowired private EntityServiceImpl entityService; + @Test public void testSystemUpdateInit() { assertNotNull(systemUpdate); } + @Test + public void testSystemUpdateKafkaProducerOverride() { + assertEquals(kafkaEventProducer, duheKafkaEventProducer); + assertEquals(entityService.getProducer(), duheKafkaEventProducer); + } + @Test public void testSystemUpdateSend() { UpgradeStepResult.Result result = diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java new file mode 100644 index 00000000000000..e1257df9ad7484 --- /dev/null +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java @@ -0,0 +1,64 @@ +package com.linkedin.datahub.upgrade; + 
+import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.testng.AssertJUnit.assertNotNull; + +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import java.util.List; +import javax.inject.Named; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; + +@ActiveProfiles("test") +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "BOOTSTRAP_SYSTEM_UPDATE_DATA_JOB_NODE_CLL_ENABLED=true", + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic", + "METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=test_mcl_versioned_topic" + }, + args = {"-u", "SystemUpdateNonBlocking"}) +public class DatahubUpgradeNonBlockingTest extends AbstractTestNGSpringContextTests { + + @Autowired(required = false) + @Named("systemUpdateNonBlocking") + private SystemUpdateNonBlocking systemUpdateNonBlocking; + + @Autowired + @Test + public void testSystemUpdateNonBlockingInit() { + assertNotNull(systemUpdateNonBlocking); + } + + @Test + public void testReindexDataJobViaNodesCLLPaging() { + EntityService mockService = mock(EntityService.class); + ReindexDataJobViaNodesCLL cllUpgrade = new ReindexDataJobViaNodesCLL(mockService, true, 10); + SystemUpdateNonBlocking upgrade = + new 
SystemUpdateNonBlocking(List.of(), List.of(cllUpgrade), null); + DefaultUpgradeManager manager = new DefaultUpgradeManager(); + manager.register(upgrade); + manager.execute("SystemUpdateNonBlocking", List.of()); + verify(mockService, times(1)) + .streamRestoreIndices( + eq( + new RestoreIndicesArgs() + .batchSize(10) + .limit(0) + .aspectName("dataJobInputOutput") + .urnLike("urn:li:dataJob:%")), + any()); + } +} diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 3e655be900bf28..dc4c3073ee351c 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -3,7 +3,7 @@ import static org.testng.AssertJUnit.*; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; @@ -14,6 +14,7 @@ @ActiveProfiles("test") @SpringBootTest( + args = {"-u", "SystemUpdate"}, classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { @@ -23,7 +24,7 @@ public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests @Autowired @Named("buildIndices") - private BuildIndices buildIndices; + private BlockingSystemUpgrade buildIndices; @Autowired private ESIndexBuilder esIndexBuilder; diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java 
b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 0e7bf5ddd5250c..5c2d6fff0f07c7 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -1,15 +1,21 @@ package com.linkedin.datahub.upgrade; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.registry.SchemaRegistryService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; import io.ebean.Database; +import java.util.Optional; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; @TestConfiguration @@ -20,8 +26,6 @@ public class UpgradeCliApplicationTestConfiguration { @MockBean private Database ebeanServer; - @MockBean private EntityService _entityService; - @MockBean private SearchService searchService; @MockBean private GraphService graphService; @@ -31,4 +35,11 @@ public class UpgradeCliApplicationTestConfiguration { @MockBean ConfigEntityRegistry configEntityRegistry; @MockBean public EntityIndexBuilders entityIndexBuilders; + + @Bean + public SchemaRegistryService schemaRegistryService() { + SchemaRegistryService mockService = mock(SchemaRegistryService.class); + 
when(mockService.getSchemaIdForTopic(anyString())).thenReturn(Optional.of(0)); + return mockService; + } } diff --git a/datahub-web-react/.env b/datahub-web-react/.env index e5529bbdaa56da..7c02340752104b 100644 --- a/datahub-web-react/.env +++ b/datahub-web-react/.env @@ -1,5 +1,3 @@ -PUBLIC_URL=/assets REACT_APP_THEME_CONFIG=theme_light.config.json SKIP_PREFLIGHT_CHECK=true -BUILD_PATH=build/yarn -REACT_APP_PROXY_TARGET=http://localhost:9002 \ No newline at end of file +REACT_APP_PROXY_TARGET=http://localhost:9002 diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 2806942dd10531..5627283af1af1c 100644 --- a/datahub-web-react/.eslintrc.js +++ b/datahub-web-react/.eslintrc.js @@ -5,10 +5,10 @@ module.exports = { 'airbnb-typescript', 'airbnb/hooks', 'plugin:@typescript-eslint/recommended', - 'plugin:jest/recommended', + 'plugin:vitest/recommended', 'prettier', ], - plugins: ['@typescript-eslint'], + plugins: ['@typescript-eslint', 'react-refresh'], parserOptions: { ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features sourceType: 'module', // Allows for the use of imports @@ -46,6 +46,9 @@ module.exports = { argsIgnorePattern: '^_', }, ], + 'vitest/prefer-to-be': 'off', + '@typescript-eslint/no-use-before-define': ['error', { functions: false, classes: false }], + 'react-refresh/only-export-components': ['warn', { 'allowConstantExport': true }], }, settings: { react: { diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index fd36e5ac4bc2c3..103792b20f761d 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'distribution' + id 'com.github.node-gradle.node' } -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' node { @@ -19,7 +19,7 @@ node { version = '21.2.0' // Version of Yarn to use. 
- yarnVersion = '1.22.1' + yarnVersion = '1.22.21' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). if (project.hasProperty('nodeDistBaseUrl')) { @@ -35,7 +35,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } @@ -44,10 +44,33 @@ node { */ task yarnInstall(type: YarnTask) { args = ['install'] + + // The node_modules directory can contain built artifacts, so + // it's not really safe to cache it. + outputs.cacheIf { false } + + inputs.files( + file('yarn.lock'), + file('package.json'), + ) + outputs.dir('node_modules') } task yarnGenerate(type: YarnTask, dependsOn: yarnInstall) { args = ['run', 'generate'] + + outputs.cacheIf { true } + + inputs.files( + yarnInstall.inputs.files, + file('codegen.yml'), + project.fileTree(dir: "../datahub-graphql-core/src/main/resources/", include: "*.graphql"), + project.fileTree(dir: "src", include: "**/*.graphql"), + ) + + outputs.files( + project.fileTree(dir: "src", include: "**/*.generated.ts"), + ) } task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -55,7 +78,8 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { } task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - args = ['run', 'test', '--watchAll', 'false'] + // Explicitly runs in non-watch mode. 
+ args = ['run', 'test', 'run'] } task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -68,13 +92,28 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { args = ['run', 'lint-fix'] } -task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) { +task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { + environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] + + outputs.cacheIf { true } + inputs.files( + file('index.html'), + project.fileTree(dir: "src"), + project.fileTree(dir: "public"), + + yarnInstall.inputs.files, + yarnGenerate.outputs.files, + + file('.env'), + file('vite.config.ts'), + file('tsconfig.json'), + ) + outputs.dir('dist') } -task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] - args = ['run', 'build'] +task cleanGenerate { + delete fileTree(dir: 'src', include: '**/*.generated.ts') } task cleanExtraDirs { @@ -82,10 +121,8 @@ task cleanExtraDirs { delete 'dist' delete 'tmp' delete 'just' - delete 'src/types.generated.ts' - delete fileTree('../datahub-frontend/public') - delete fileTree(dir: 'src/graphql', include: '*.generated.ts') } +cleanExtraDirs.finalizedBy(cleanGenerate) clean.finalizedBy(cleanExtraDirs) configurations { @@ -93,24 +130,16 @@ configurations { } distZip { - dependsOn yarnQuickBuild - baseName 'datahub-web-react' + dependsOn yarnBuild + archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } -task copyAssets(dependsOn: distZip) { - doLast { - copy { - from zipTree(distZip.outputs.files.first()) - into "../datahub-frontend/public" - } - } -} - jar { - dependsOn distZip, copyAssets + dependsOn distZip into('public') { from zipTree(distZip.outputs.files.first()) } - classifier = 'assets' + archiveClassifier = 'assets' } +build.dependsOn jar diff --git 
a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js deleted file mode 100644 index 6ede45902128f5..00000000000000 --- a/datahub-web-react/craco.config.js +++ /dev/null @@ -1,75 +0,0 @@ -/* eslint-disable @typescript-eslint/no-var-requires */ -require('dotenv').config(); -const { whenProd } = require('@craco/craco'); -const CracoAntDesignPlugin = require('craco-antd'); -const path = require('path'); -const CopyWebpackPlugin = require('copy-webpack-plugin'); - -// eslint-disable-next-line import/no-dynamic-require -const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); - -function addLessPrefixToKeys(styles) { - const output = {}; - Object.keys(styles).forEach((key) => { - output[`@${key}`] = styles[key]; - }); - return output; -} - -module.exports = { - webpack: { - configure: { - optimization: whenProd(() => ({ - splitChunks: { - cacheGroups: { - vendor: { - test: /[\\/]node_modules[\\/]/, - name: 'vendors', - chunks: 'all', - }, - }, - }, - })), - // Webpack 5 no longer automatically pollyfill core Node.js modules - resolve: { fallback: { fs: false } }, - // Ignore Webpack 5's missing source map warnings from node_modules - ignoreWarnings: [{ module: /node_modules/, message: /source-map-loader/ }], - }, - plugins: { - add: [ - // Self host images by copying them to the build directory - new CopyWebpackPlugin({ - patterns: [{ from: 'src/images', to: 'platforms' }], - }), - // Copy monaco-editor files to the build directory - new CopyWebpackPlugin({ - patterns: [ - { from: 'node_modules/monaco-editor/min/vs/', to: 'monaco-editor/vs' }, - { from: 'node_modules/monaco-editor/min-maps/vs/', to: 'monaco-editor/min-maps/vs' }, - ], - }), - ], - }, - }, - plugins: [ - { - plugin: CracoAntDesignPlugin, - options: { - customizeThemeLessPath: path.join(__dirname, 'src/conf/theme/global-variables.less'), - customizeTheme: addLessPrefixToKeys(themeConfig.styles), - }, - }, - ], - jest: { - configure: { - // Use dist files 
instead of source files - moduleNameMapper: { - '^d3-interpolate-path': `d3-interpolate-path/build/d3-interpolate-path`, - '^d3-(.*)$': `d3-$1/dist/d3-$1`, - '^lib0/((?!dist).*)$': 'lib0/dist/$1.cjs', - '^y-protocols/(.*)$': 'y-protocols/dist/$1.cjs', - '\\.(css|less)$': '/src/__mocks__/styleMock.js', - }, - }, - }, -}; diff --git a/datahub-web-react/datahub-frontend.graphql b/datahub-web-react/datahub-frontend.graphql deleted file mode 100644 index 6df3c387e14fe7..00000000000000 --- a/datahub-web-react/datahub-frontend.graphql +++ /dev/null @@ -1,389 +0,0 @@ -scalar Long - -schema { - query: Query - mutation: Mutation -} - -type Query { - dataset(urn: String!): Dataset - user(urn: String!): CorpUser - search(input: SearchInput!): SearchResults - autoComplete(input: AutoCompleteInput!): AutoCompleteResults - browse(input: BrowseInput!): BrowseResults - browsePaths(input: BrowsePathsInput!): [[String!]!] -} - -type Mutation { - logIn(username: String!, password: String!): CorpUser - updateDataset(input: DatasetUpdateInput!): Dataset -} - -input DatasetUpdateInput { - urn: String! - ownership: OwnershipUpdate -} - -input OwnershipUpdate { - owners: [OwnerUpdate!] -} - -input OwnerUpdate { - # The owner URN, eg urn:li:corpuser:1 - owner: String! - - # The owner role type - type: OwnershipType! -} - -enum OwnershipSourceType { - AUDIT - DATABASE - FILE_SYSTEM - ISSUE_TRACKING_SYSTEM - MANUAL - SERVICE - SOURCE_CONTROL - OTHER -} - -type OwnershipSource { - """ - The type of the source - """ - type: OwnershipSourceType! - - """ - A reference URL for the source - """ - url: String -} - -enum OwnershipType { - """ - A person or group that is in charge of developing the code - """ - DEVELOPER - - """ - A person or group that is owning the data - """ - DATAOWNER - - """ - A person or a group that overseas the operation, e.g. a DBA or SRE. 
- """ - DELEGATE - - """ - A person, group, or service that produces/generates the data - """ - PRODUCER - - """ - A person, group, or service that consumes the data - """ - CONSUMER - - """ - A person or a group that has direct business interest - """ - STAKEHOLDER -} - -type Owner { - """ - Owner object - """ - owner: CorpUser! - - """ - The type of the ownership - """ - type: OwnershipType - - """ - Source information for the ownership - """ - source: OwnershipSource -} - -type Ownership { - owners: [Owner!] - - lastModified: Long! -} - -enum FabricType { - """ - Designates development fabrics - """ - DEV - - """ - Designates early-integration (staging) fabrics - """ - EI - - """ - Designates production fabrics - """ - PROD - - """ - Designates corporation fabrics - """ - CORP -} - -enum PlatformNativeType { - """ - Table - """ - TABLE - - """ - View - """ - VIEW - - """ - Directory in file system - """ - DIRECTORY - - """ - Stream - """ - STREAM - - """ - Bucket in key value store - """ - BUCKET -} - -type PropertyTuple { - key: String! - value: String -} - -type SubTypes { - typeNames: [String!] -} - -type Dataset { - urn: String! - - platform: String! - - name: String! - - origin: FabricType! - - description: String - - uri: String - - platformNativeType: PlatformNativeType - - tags: [String!]! - - properties: [PropertyTuple!] - - createdTime: Long! - - modifiedTime: Long! - - ownership: Ownership - - subTypes: SubTypes -} - -type CorpUserInfo { - active: Boolean! - - displayName: String - - email: String! - - title: String - - manager: CorpUser - - departmentId: Long - - departmentName: String - - firstName: String - - lastName: String - - fullName: String - - countryCode: String -} - -type CorpUserEditableInfo { - aboutMe: String - - teams: [String!] - - skills: [String!] - - pictureLink: String -} - -type CorpUser { - urn: String! - - username: String! 
- - info: CorpUserInfo - - editableInfo: CorpUserEditableInfo -} - -type CorpGroup implements Entity { - """ - The unique user URN - """ - urn: String! - - """ - GMS Entity Type - """ - type: EntityType! - - """ - group name e.g. wherehows-dev, ask_metadata - """ - name: String - - """ - Information of the corp group - """ - info: CorpGroupInfo -} - - -type CorpGroupInfo { - """ - email of this group - """ - email: String! - - """ - owners of this group - """ - admins: [String!]! - - """ - List of ldap urn in this group. - """ - members: [String!]! - - """ - List of groups in this group. - """ - groups: [String!]! -} - -enum EntityType { - DATASET - USER - DATA_FLOW - DATA_JOB - CORP_USER - CORP_GROUP -} - -# Search Input -input SearchInput { - type: EntityType! - query: String! - start: Int - count: Int - filters: [FacetFilterInput!] -} - -input FacetFilterInput { - field: String! # Facet Field Name - value: String! # Facet Value -} - -# Search Output -type SearchResults { - start: Int! - count: Int! - total: Int! - elements: [SearchResult!]! - facets: [FacetMetadata!] -} - -union SearchResult = Dataset | CorpUser - -type FacetMetadata { - field: String! - aggregations: [AggregationMetadata!]! -} - -type AggregationMetadata { - value: String! - count: Long! -} - -# Autocomplete Input -input AutoCompleteInput { - type: EntityType! - query: String! - field: String # Field name - limit: Int - filters: [FacetFilterInput!] -} - -# Autocomplete Output -type AutoCompleteResults { - query: String! - suggestions: [String!]! -} - -# Browse Inputs -input BrowseInput { - type: EntityType! - path: [String!] - start: Int - count: Int - filters: [FacetFilterInput!] -} - -# Browse Output -type BrowseResults { - entities: [BrowseResultEntity!]! - start: Int! - count: Int! - total: Int! - metadata: BrowseResultMetadata! -} - -type BrowseResultEntity { - name: String! - urn: String! -} - -type BrowseResultMetadata { - path: [String!] - groups: [BrowseResultGroup!]! 
- totalNumEntities: Long! -} - -type BrowseResultGroup { - name: String! - count: Long! -} - -# Browse Paths Input -input BrowsePathsInput { - type: EntityType! - urn: String! -} diff --git a/datahub-web-react/public/index.html b/datahub-web-react/index.html similarity index 66% rename from datahub-web-react/public/index.html rename to datahub-web-react/index.html index ead3a0aba82cb9..bb86e2f350e1a3 100644 --- a/datahub-web-react/public/index.html +++ b/datahub-web-react/index.html @@ -2,7 +2,8 @@ - + + @@ -10,21 +11,13 @@ manifest.json provides metadata used when your web app is installed on a user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/ --> - - + DataHub

+ + \ No newline at end of file diff --git a/datahub-web-react/src/images/s3.png b/datahub-web-react/src/images/s3.png index 87fc905d745e03..3779f45e107127 100644 Binary files a/datahub-web-react/src/images/s3.png and b/datahub-web-react/src/images/s3.png differ diff --git a/datahub-web-react/src/images/s3logo.png b/datahub-web-react/src/images/s3logo.png index 87fc905d745e03..3779f45e107127 100644 Binary files a/datahub-web-react/src/images/s3logo.png and b/datahub-web-react/src/images/s3logo.png differ diff --git a/datahub-web-react/src/images/sagemakerlogo.png b/datahub-web-react/src/images/sagemakerlogo.png index fa9baf1a122971..6cad72a6f28eb7 100644 Binary files a/datahub-web-react/src/images/sagemakerlogo.png and b/datahub-web-react/src/images/sagemakerlogo.png differ diff --git a/datahub-web-react/src/images/slacklogo.png b/datahub-web-react/src/images/slacklogo.png new file mode 100644 index 00000000000000..2c8adc74b6b84b Binary files /dev/null and b/datahub-web-react/src/images/slacklogo.png differ diff --git a/datahub-web-react/src/images/tableaulogo.svg b/datahub-web-react/src/images/tableaulogo.svg new file mode 100644 index 00000000000000..9fd5b221a41884 --- /dev/null +++ b/datahub-web-react/src/images/tableaulogo.svg @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/datahub-web-react/src/images/teamslogo.png b/datahub-web-react/src/images/teamslogo.png new file mode 100644 index 00000000000000..29432fc9d02206 Binary files /dev/null and b/datahub-web-react/src/images/teamslogo.png differ diff --git a/datahub-web-react/src/images/verificationBlue.svg b/datahub-web-react/src/images/verificationBlue.svg new file mode 100644 index 00000000000000..dafdd60fb156d8 --- /dev/null +++ b/datahub-web-react/src/images/verificationBlue.svg @@ -0,0 +1,4 @@ + + + + diff --git a/datahub-web-react/src/images/verificationGreen.svg b/datahub-web-react/src/images/verificationGreen.svg new file mode 100644 index 
00000000000000..d082db5dfc456b --- /dev/null +++ b/datahub-web-react/src/images/verificationGreen.svg @@ -0,0 +1,4 @@ + + + + diff --git a/datahub-web-react/src/images/verificationPurple.svg b/datahub-web-react/src/images/verificationPurple.svg new file mode 100644 index 00000000000000..a9549195779f06 --- /dev/null +++ b/datahub-web-react/src/images/verificationPurple.svg @@ -0,0 +1,4 @@ + + + + diff --git a/datahub-web-react/src/images/verificationPurpleWhite.svg b/datahub-web-react/src/images/verificationPurpleWhite.svg new file mode 100644 index 00000000000000..c57d8b3105ebed --- /dev/null +++ b/datahub-web-react/src/images/verificationPurpleWhite.svg @@ -0,0 +1,4 @@ + + + + diff --git a/datahub-web-react/src/images/verificationWarningGray.svg b/datahub-web-react/src/images/verificationWarningGray.svg new file mode 100644 index 00000000000000..725f448894532d --- /dev/null +++ b/datahub-web-react/src/images/verificationWarningGray.svg @@ -0,0 +1,4 @@ + + + + diff --git a/datahub-web-react/src/images/verticalogo copy.png b/datahub-web-react/src/images/verticalogo copy.png new file mode 100644 index 00000000000000..5da38f4e67c7d4 Binary files /dev/null and b/datahub-web-react/src/images/verticalogo copy.png differ diff --git a/datahub-web-react/src/index.tsx b/datahub-web-react/src/index.tsx index 6b03ec71c687ad..c3ef2105ed1351 100644 --- a/datahub-web-react/src/index.tsx +++ b/datahub-web-react/src/index.tsx @@ -1,7 +1,6 @@ import React from 'react'; import ReactDOM from 'react-dom'; -import './graphql-mock/createServer'; -import App from './App'; +import { App } from './App'; import reportWebVitals from './reportWebVitals'; ReactDOM.render( diff --git a/datahub-web-react/src/providers/EducationStepsProvider.tsx b/datahub-web-react/src/providers/EducationStepsProvider.tsx index 28dc6b91e0e82b..f254b21ea99a55 100644 --- a/datahub-web-react/src/providers/EducationStepsProvider.tsx +++ b/datahub-web-react/src/providers/EducationStepsProvider.tsx @@ -1,9 +1,8 @@ 
import React, { useEffect, useState } from 'react'; -import { getStepIds } from '../app/onboarding/utils'; +import { getInitialAllowListIds, getStepIds } from '../app/onboarding/utils'; import { useBatchGetStepStatesQuery } from '../graphql/step.generated'; import { EducationStepsContext } from './EducationStepsContext'; import { StepStateResult } from '../types.generated'; -import { CURRENT_ONBOARDING_IDS } from '../app/onboarding/OnboardingConfig'; import { useUserContext } from '../app/context/useUserContext'; export function EducationStepsProvider({ children }: { children: React.ReactNode }) { @@ -13,7 +12,7 @@ export function EducationStepsProvider({ children }: { children: React.ReactNode const results = data?.batchGetStepStates.results; const [educationSteps, setEducationSteps] = useState(results || null); const [educationStepIdsAllowlist, setEducationStepIdsAllowlist] = useState>( - new Set(CURRENT_ONBOARDING_IDS), + new Set(getInitialAllowListIds()), ); useEffect(() => { diff --git a/datahub-web-react/src/react-app-env.d.ts b/datahub-web-react/src/react-app-env.d.ts deleted file mode 100644 index 6431bc5fc6b2c9..00000000000000 --- a/datahub-web-react/src/react-app-env.d.ts +++ /dev/null @@ -1 +0,0 @@ -/// diff --git a/datahub-web-react/src/setupProxy.js b/datahub-web-react/src/setupProxy.js deleted file mode 100644 index 165e394a507f3b..00000000000000 --- a/datahub-web-react/src/setupProxy.js +++ /dev/null @@ -1,37 +0,0 @@ -const logInFilter = function (pathname, req) { - return pathname.match('^/logIn') && req.method === 'POST'; -}; - -const proxyTarget = process.env.REACT_APP_PROXY_TARGET || 'http://localhost:9002'; - -if (process.env.REACT_APP_MOCK === 'true' || process.env.REACT_APP_MOCK === 'cy') { - // no proxy needed, MirageJS will intercept all http requests - module.exports = function () {}; -} else { - // create a proxy to the graphql server running in docker container - const { createProxyMiddleware } = require('http-proxy-middleware'); - - 
module.exports = function (app) { - app.use( - '/logIn', - createProxyMiddleware(logInFilter, { - target: proxyTarget, - changeOrigin: true, - }), - ); - app.use( - '/authenticate', - createProxyMiddleware({ - target: proxyTarget, - changeOrigin: true, - }), - ); - app.use( - '/api/v2/graphql', - createProxyMiddleware({ - target: proxyTarget, - changeOrigin: true, - }), - ); - }; -} diff --git a/datahub-web-react/src/setupTests.ts b/datahub-web-react/src/setupTests.ts index a7a647ad10e01c..b4729293a2f808 100644 --- a/datahub-web-react/src/setupTests.ts +++ b/datahub-web-react/src/setupTests.ts @@ -2,8 +2,7 @@ // allows you to do things like: // expect(element).toHaveTextContent(/react/i) // learn more: https://github.com/testing-library/jest-dom -import '@testing-library/jest-dom'; -import sinon from 'sinon'; +import '@testing-library/jest-dom/vitest'; // Mock window.matchMedia interface. // See https://jestjs.io/docs/en/manual-mocks#mocking-methods-which-are-not-implemented-in-jsdom @@ -13,14 +12,22 @@ global.matchMedia = (() => { return { matches: false, - addListener: jest.fn(), - removeListener: jest.fn(), + addListener: vi.fn(), + removeListener: vi.fn(), }; }); -const { location } = window; -delete window.location; -window.location = { ...location, replace: () => {} }; -sinon.stub(window.location, 'replace'); -jest.mock('js-cookie', () => ({ get: () => 'urn:li:corpuser:2' })); -jest.mock('./app/entity/shared/tabs/Documentation/components/editor/Editor'); +window.location = { ...window.location, replace: () => {} }; + +// Suppress `Error: Not implemented: window.computedStyle(elt, pseudoElt)`. 
+// From https://github.com/vitest-dev/vitest/issues/2061 +// and https://github.com/NickColley/jest-axe/issues/147#issuecomment-758804533 +const { getComputedStyle } = window; +window.getComputedStyle = (elt) => getComputedStyle(elt); + +vi.mock('js-cookie', () => ({ + default: { + get: () => 'urn:li:corpuser:2', + }, +})); +vi.mock('./app/entity/shared/tabs/Documentation/components/editor/Editor'); diff --git a/datahub-web-react/src/utils/focus/index.ts b/datahub-web-react/src/utils/focus/index.ts new file mode 100644 index 00000000000000..313c00f4290181 --- /dev/null +++ b/datahub-web-react/src/utils/focus/index.ts @@ -0,0 +1,6 @@ +// To focus out from modal popup so autofocus works +// getContainer prop value when rendering modals. +// The getContainer prop allows you to specify the container in which the modal should be rendered. +// By default, modals are appended to the end of the document body, but using getContainer, you can specify a different container. +export const getModalDomContainer = () => document.getElementById('root') as HTMLElement; +// this can we remove once we upgrade to new antd version i.e 5.11.2 because there we have autoFocus property for the modal. 
\ No newline at end of file diff --git a/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx b/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx index 0903aeeaf4fe5b..20fd5afe593b52 100644 --- a/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx +++ b/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx @@ -56,7 +56,7 @@ export default ({ children, initialEntries }: Props) => { writable: true, value: `${CLIENT_AUTH_COOKIE}=urn:li:corpuser:2`, }); - jest.mock('js-cookie', () => ({ get: () => 'urn:li:corpuser:2' })); + vi.mock('js-cookie', () => ({ default: { get: () => 'urn:li:corpuser:2' }})); return ( diff --git a/datahub-web-react/src/vite-env.d.ts b/datahub-web-react/src/vite-env.d.ts new file mode 100644 index 00000000000000..b1f45c7866694d --- /dev/null +++ b/datahub-web-react/src/vite-env.d.ts @@ -0,0 +1,2 @@ +/// +/// diff --git a/datahub-web-react/tsconfig.json b/datahub-web-react/tsconfig.json index 760c992f2ca3ac..56361d52b21c3f 100644 --- a/datahub-web-react/tsconfig.json +++ b/datahub-web-react/tsconfig.json @@ -15,7 +15,8 @@ "resolveJsonModule": true, "isolatedModules": true, "noEmit": true, - "jsx": "react-jsx" + "jsx": "react-jsx", + "types": ["vitest/globals"] }, - "include": ["src", "src/conf/theme/styled-components.d.ts", "craco.config.js", ".eslintrc.js"] + "include": ["src", "src/conf/theme/styled-components.d.ts", "vite.config.ts", ".eslintrc.js"] } diff --git a/datahub-web-react/vite.config.ts b/datahub-web-react/vite.config.ts new file mode 100644 index 00000000000000..683b37974c85a1 --- /dev/null +++ b/datahub-web-react/vite.config.ts @@ -0,0 +1,101 @@ +import * as path from 'path'; +import { defineConfig, loadEnv } from 'vite'; +import react from '@vitejs/plugin-react'; +import svgr from 'vite-plugin-svgr'; +import macrosPlugin from 'vite-plugin-babel-macros'; +import { viteStaticCopy } from 'vite-plugin-static-copy'; + +// https://vitejs.dev/config/ +export default defineConfig(({ mode }) => { + // Via 
https://stackoverflow.com/a/66389044. + const env = loadEnv(mode, process.cwd(), ''); + process.env = { ...process.env, ...env }; + + // eslint-disable-next-line global-require, import/no-dynamic-require, @typescript-eslint/no-var-requires + const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); + + // Setup proxy to the datahub-frontend service. + const frontendProxy = { + target: process.env.REACT_APP_PROXY_TARGET || 'http://localhost:9002', + changeOrigin: true, + }; + const proxyOptions = { + '/logIn': frontendProxy, + '/authenticate': frontendProxy, + '/api/v2/graphql': frontendProxy, + '/track': frontendProxy, + }; + + return { + appType: 'spa', + plugins: [ + react(), + svgr(), + macrosPlugin(), + viteStaticCopy({ + targets: [ + // Self-host images by copying them to the build directory + { src: path.resolve(__dirname, 'src/images/*'), dest: 'assets/platforms' }, + // Also keep the theme json files in the build directory + { src: path.resolve(__dirname, 'src/conf/theme/*.json'), dest: 'assets/conf/theme' }, + ], + }), + viteStaticCopy({ + targets: [ + // Copy monaco-editor files to the build directory + // Because of the structured option, specifying dest . + // means that it will mirror the node_modules/... structure + // in the build directory. + { + src: 'node_modules/monaco-editor/min/vs/', + dest: '.', + }, + { + src: 'node_modules/monaco-editor/min-maps/vs/', + dest: '.', + rename: (name, ext, fullPath) => { + console.log(name, ext, fullPath); + return name; + }, + }, + ], + structured: true, + }), + ], + // optimizeDeps: { + // include: ['@ant-design/colors', '@ant-design/icons', 'lodash-es', '@ant-design/icons/es/icons'], + // }, + envPrefix: 'REACT_APP_', + build: { + outDir: 'dist', + }, + server: { + open: false, + host: false, + port: 3000, + proxy: proxyOptions, + }, + css: { + preprocessorOptions: { + less: { + javascriptEnabled: true, + // Override antd theme variables. 
+ // https://4x.ant.design/docs/react/customize-theme#Ant-Design-Less-variables + modifyVars: themeConfig.styles, + }, + }, + }, + test: { + globals: true, + environment: 'jsdom', + setupFiles: './src/setupTests.ts', + css: true, + // reporters: ['verbose'], + coverage: { + reporter: ['text', 'json', 'html'], + include: ['src/**/*'], + exclude: [], + }, + }, + }; +}); diff --git a/datahub-web-react/yarn.lock b/datahub-web-react/yarn.lock index 41b542da97550b..aad6d84c6bd93d 100644 --- a/datahub-web-react/yarn.lock +++ b/datahub-web-react/yarn.lock @@ -2,10 +2,10 @@ # yarn lockfile v1 -"@alloc/quick-lru@^5.2.0": - version "5.2.0" - resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" - integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== +"@adobe/css-tools@^4.3.1": + version "4.3.2" + resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.3.2.tgz#a6abc715fb6884851fca9dad37fc34739a04fd11" + integrity sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw== "@ampproject/remapping@^2.2.0": version "2.2.1" @@ -126,15 +126,6 @@ lodash "^4.17.21" resize-observer-polyfill "^1.5.1" -"@apideck/better-ajv-errors@^0.3.1": - version "0.3.6" - resolved "https://registry.yarnpkg.com/@apideck/better-ajv-errors/-/better-ajv-errors-0.3.6.tgz#957d4c28e886a64a8141f7522783be65733ff097" - integrity sha512-P+ZygBLZtkp0qqOAJJVX4oX/sFo5JR3eBWwwuqHHhK0GIgQOKWrAfiAaWX0aArHkRWHMuggFEgAZNxVPwPZYaA== - dependencies: - json-schema "^0.4.0" - jsonpointer "^5.0.0" - leven "^3.1.0" - "@apollo/client@^3.3.19": version "3.3.19" resolved "https://registry.yarnpkg.com/@apollo/client/-/client-3.3.19.tgz#f1172dc9b9d7eae04c8940b047fd3b452ef92d2c" @@ -191,7 +182,7 @@ dependencies: node-fetch "^2.6.1" -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.16.0", 
"@babel/code-frame@^7.22.5", "@babel/code-frame@^7.8.3": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.5.tgz#234d98e1551960604f1246e6475891a570ad5658" integrity sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ== @@ -206,7 +197,7 @@ "@babel/highlight" "^7.23.4" chalk "^2.4.2" -"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.5": +"@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.5.tgz#b1f6c86a02d85d2dd3368a2b67c09add8cd0c255" integrity sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA== @@ -216,7 +207,7 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== -"@babel/core@^7.1.0", "@babel/core@^7.11.1", "@babel/core@^7.12.3", "@babel/core@^7.14.0", "@babel/core@^7.16.0", "@babel/core@^7.7.2", "@babel/core@^7.8.0": +"@babel/core@^7.14.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.22.5.tgz#d67d9747ecf26ee7ecd3ebae1ee22225fe902a89" integrity sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg== @@ -237,6 +228,27 @@ json5 "^2.2.2" semver "^6.3.0" +"@babel/core@^7.17.7", "@babel/core@^7.21.3", "@babel/core@^7.23.2": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.2.tgz#ed10df0d580fff67c5f3ee70fd22e2e4c90a9f94" + integrity sha512-n7s51eWdaWZ3vGT2tD4T7J6eJs3QoBXydv7vkUM06Bf1cbVD2Kc2UrkzhiQwobfV7NwOnQXYL7UBJ5VPU+RGoQ== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" 
+ "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.0" + "@babel/helpers" "^7.23.2" + "@babel/parser" "^7.23.0" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.23.0" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + "@babel/core@^7.22.9": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.5.tgz#6e23f2acbcb77ad283c5ed141f824fd9f70101c7" @@ -258,16 +270,7 @@ json5 "^2.2.3" semver "^6.3.1" -"@babel/eslint-parser@^7.16.3": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/eslint-parser/-/eslint-parser-7.22.5.tgz#fa032503b9e2d188e25b1b95d29e8b8431042d78" - integrity sha512-C69RWYNYtrgIRE5CmTd77ZiLDXqgBipahJc/jHP3sLcAGj6AJzxNIuKNpVnICqbyK7X3pFUfEvL++rvtbQpZkQ== - dependencies: - "@nicolo-ribaudo/eslint-scope-5-internals" "5.1.1-v1" - eslint-visitor-keys "^2.1.0" - semver "^6.3.0" - -"@babel/generator@^7.14.0", "@babel/generator@^7.22.5", "@babel/generator@^7.7.2": +"@babel/generator@^7.14.0", "@babel/generator@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.22.5.tgz#1e7bf768688acfb05cf30b2369ef855e82d984f7" integrity sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA== @@ -287,21 +290,24 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.18.6", "@babel/helper-annotate-as-pure@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" - integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== +"@babel/generator@^7.23.0": + version "7.23.0" + resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== dependencies: - "@babel/types" "^7.22.5" + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.22.5": +"@babel/helper-annotate-as-pure@^7.0.0", "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.5.tgz#a3f4758efdd0190d8927fcffd261755937c71878" - integrity sha512-m1EP3lVOPptR+2DwD125gziZNcmoNSHGmJROKoy87loWUQyJaVXDgpmruWqDARZSmtYQ+Dl25okU8+qhVzuykw== + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.5": +"@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz#fc7319fc54c5e2fa14b2909cf3c5fd3046813e02" integrity sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw== @@ -323,7 +329,7 @@ lru-cache "^5.1.1" semver "^6.3.1" -"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.22.5": +"@babel/helper-create-class-features-plugin@^7.18.6": version "7.22.5" resolved 
"https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.22.5.tgz#2192a1970ece4685fbff85b48da2c32fcb130b7c" integrity sha512-xkb58MyOYIslxu3gKmVXmjTtUPvBU4odYzbiIQbWwLKIHCsx6UGZGX6F1IznMFVnDdirseUZopzN+ZRt8Xb33Q== @@ -338,27 +344,6 @@ "@babel/helper-split-export-declaration" "^7.22.5" semver "^6.3.0" -"@babel/helper-create-regexp-features-plugin@^7.12.13", "@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.5.tgz#bb2bf0debfe39b831986a4efbf4066586819c6e4" - integrity sha512-1VpEFOIbMRaXyDeUwUfmTIxExLwQ+zkW+Bh5zXpApA3oQedBx9v/updixWxnx/bZpKw7u8VxWjb/qWpIcmPq8A== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - regexpu-core "^5.3.1" - semver "^6.3.0" - -"@babel/helper-define-polyfill-provider@^0.4.0": - version "0.4.0" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.4.0.tgz#487053f103110f25b9755c5980e031e93ced24d8" - integrity sha512-RnanLx5ETe6aybRi1cO/edaRH+bNYWaryCEmjDDYyNr4wnSzyOp8T0dWipmqVHKEY3AbVKUom50AKSlj1zmKbg== - dependencies: - "@babel/helper-compilation-targets" "^7.17.7" - "@babel/helper-plugin-utils" "^7.16.7" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - "@babel/helper-environment-visitor@^7.22.20": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" @@ -399,7 +384,7 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.10.4", "@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.22.5": +"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.16.7", 
"@babel/helper-module-imports@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz#1a8f4c9f4027d23f520bd76b364d44434a72660c" integrity sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg== @@ -427,6 +412,17 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-module-transforms@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.0.tgz#3ec246457f6c842c0aee62a01f60739906f7047e" + integrity sha512-WhDWw1tdrlT0gMgUJSlX0IQvoO1eN279zrAUbVB+KpV2c3Tylz8+GnKOLllCS6Z/iZQEyVYxhZVUdPTqs2YYPw== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-module-transforms@^7.23.3": version "7.23.3" resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" @@ -445,21 +441,11 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": +"@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" 
integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== -"@babel/helper-remap-async-to-generator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.5.tgz#14a38141a7bf2165ad38da61d61cf27b43015da2" - integrity sha512-cU0Sq1Rf4Z55fgz7haOakIyM7+x/uCFwXpLPaeRzfoUtAEAuUZjZvFPjL/rk5rW693dIgn2hng1W7xbT7lWT4g== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-wrap-function" "^7.22.5" - "@babel/types" "^7.22.5" - "@babel/helper-replace-supers@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.5.tgz#71bc5fb348856dea9fdc4eafd7e2e49f585145dc" @@ -479,7 +465,7 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5": +"@babel/helper-skip-transparent-expression-wrappers@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== @@ -530,16 +516,6 @@ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz#de52000a15a177413c8234fa3a8af4ee8102d0ac" integrity sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw== -"@babel/helper-wrap-function@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.5.tgz#44d205af19ed8d872b4eefb0d2fa65f45eb34f06" - integrity sha512-bYqLIBSEshYcYQyfks8ewYA8S30yaGSeRslcvKMvoUk6HHPySbxHq9YRi6ghhzEU+yhQv9bP/jXnygkStOcqZw== - dependencies: - 
"@babel/helper-function-name" "^7.22.5" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.5" - "@babel/types" "^7.22.5" - "@babel/helpers@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.22.5.tgz#74bb4373eb390d1ceed74a15ef97767e63120820" @@ -549,6 +525,15 @@ "@babel/traverse" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helpers@^7.23.2": + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.2.tgz#2832549a6e37d484286e15ba36a5330483cac767" + integrity sha512-lzchcp8SjTSVe/fPmLwtWVBFC7+Tbn8LGHDVfDp9JGxpAY5opSaEFgt8UQvrnECWOTdji2mOWMz1rOhkHscmGQ== + dependencies: + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.2" + "@babel/types" "^7.23.0" + "@babel/helpers@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.5.tgz#52f522840df8f1a848d06ea6a79b79eefa72401e" @@ -576,7 +561,7 @@ chalk "^2.4.2" js-tokens "^4.0.0" -"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.14.7", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": +"@babel/parser@^7.1.0", "@babel/parser@^7.1.6", "@babel/parser@^7.14.0", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.5.tgz#721fd042f3ce1896238cf1b341c77eb7dee7dbea" integrity sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q== @@ -586,23 +571,12 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" integrity sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ== -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" - integrity sha512-NP1M5Rf+u2Gw9qfSO4ihjcTGW5zXTi36ITLd4/EoAcEhIZ0yjMqmftDNl3QC19CX7olhrjpyU454g/2W7X0jvQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.22.5.tgz#fef09f9499b1f1c930da8a0c419db42167d792ca" - integrity sha512-31Bb65aZaUwqCbWMnZPduIZxCBngHFlzyN6Dq6KAJjtx+lx6ohKHubc61OomYi7XwVD4Ol0XCVz4h+pYFR048g== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - "@babel/plugin-transform-optional-chaining" "^7.22.5" +"@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== -"@babel/plugin-proposal-class-properties@^7.0.0", "@babel/plugin-proposal-class-properties@^7.16.0": +"@babel/plugin-proposal-class-properties@^7.0.0": version "7.18.6" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== @@ -610,33 +584,6 @@ "@babel/helper-create-class-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-proposal-decorators@^7.16.4": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.22.5.tgz#dc8cdda048e5aea947efda920e030199806b868d" - integrity sha512-h8hlezQ4dl6ixodgXkH8lUfcD7x+WAuIqPUjwGoItynrXOAv4a4Tci1zA/qjzQjjcl0v3QpLdc2LM6ZACQuY7A== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.5" - "@babel/helper-split-export-declaration" "^7.22.5" - "@babel/plugin-syntax-decorators" "^7.22.5" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.16.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" - integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.16.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" - integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-proposal-object-rest-spread@^7.0.0": version "7.20.7" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" @@ -648,95 +595,13 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-transform-parameters" "^7.20.7" -"@babel/plugin-proposal-optional-chaining@^7.16.0": - version "7.21.0" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" - integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== - dependencies: - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -"@babel/plugin-proposal-private-methods@^7.16.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" - integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": - version "7.21.0-placeholder-for-preset-env.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" - integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== - -"@babel/plugin-proposal-private-property-in-object@^7.21.11": - version "7.21.11" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.11.tgz#69d597086b6760c4126525cfa154f34631ff272c" - integrity sha512-0QZ8qP/3RLDVBwBFoWAwCtgcDZJVwA5LUJRZU8x2YFfKNuFq161wK3cuGrALu5yiPu+vzwTAg/sMWVNeWeNyaw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-create-class-features-plugin" "^7.21.0" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - 
-"@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.12.13.tgz#bebde51339be829c17aaaaced18641deb62b39ba" - integrity sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.12.13" - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.0.0", "@babel/plugin-syntax-class-properties@^7.12.13", "@babel/plugin-syntax-class-properties@^7.8.3": +"@babel/plugin-syntax-class-properties@^7.0.0": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-decorators@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.22.5.tgz#329fe2907c73de184033775637dbbc507f09116a" - integrity sha512-avpUOBS7IU6al8MmF1XpAyj9QYeLPuSDJI5D4pVMSMdL7xQokKqJPYQC67RCT0aCTashUXPiGwMJ0DEXXCEmMA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" - "@babel/plugin-syntax-flow@^7.0.0", "@babel/plugin-syntax-flow@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.22.5.tgz#163b820b9e7696ce134df3ee716d9c0c98035859" @@ -751,62 +616,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-import-assertions@^7.22.5": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.22.5.tgz#07d252e2aa0bc6125567f742cd58619cb14dce98" - integrity sha512-rdV97N7KqsRzeNGoWUOK6yUsWarLjE5Su/Snk9IYPU9CwkWHs4t+rTGOvffTR8XGkJMTAdLfO0xVnXm8wugIJg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-syntax-import-attributes@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.22.5.tgz#ab840248d834410b829f569f5262b9e517555ecb" - integrity sha512-KwvoWDeNKPETmozyFE0P2rOLqh39EoQHNjqizrI5B8Vt0ZNS7M56s7dAiAqbYfiAYOuIzIh96z3iR2ktgu3tEg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-syntax-import-meta@^7.10.4", "@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.17.12", "@babel/plugin-syntax-jsx@^7.22.5": +"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.16.7", "@babel/plugin-syntax-jsx@^7.17.12", "@babel/plugin-syntax-jsx@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz#a6b68e84fb76e759fc3b93e901876ffabbe1d918" integrity 
sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" @@ -814,107 +630,35 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-top-level-await@^7.14.5", "@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.22.5", "@babel/plugin-syntax-typescript@^7.7.2": +"@babel/plugin-syntax-typescript@^7.16.7": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz#aac8d383b062c5072c647a31ef990c1d0af90272" integrity 
sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-syntax-unicode-sets-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" - integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-arrow-functions@^7.0.0", "@babel/plugin-transform-arrow-functions@^7.22.5": +"@babel/plugin-transform-arrow-functions@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.22.5.tgz#e5ba566d0c58a5b2ba2a8b795450641950b71958" integrity sha512-26lTNXoVRdAnsaDXPpvCNUq+OVWEVC6bx7Vvz9rC53F2bagUWW4u4ii2+h8Fejfh7RYqPxn+libeFBBck9muEw== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-async-generator-functions@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.22.5.tgz#7336356d23380eda9a56314974f053a020dab0c3" - integrity sha512-gGOEvFzm3fWoyD5uZq7vVTD57pPJ3PczPUD/xCFGjzBpUosnklmXyKnGQbbbGs1NPNPskFex0j93yKbHt0cHyg== - dependencies: - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-remap-async-to-generator" "^7.22.5" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-transform-async-to-generator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.22.5.tgz#c7a85f44e46f8952f6d27fe57c2ed3cc084c3775" - integrity 
sha512-b1A8D8ZzE/VhNDoV1MSJTnpKkCG5bJo+19R4o4oy03zM7ws8yEMK755j61Dc3EyvdysbqH5BOOTquJ7ZX9C6vQ== - dependencies: - "@babel/helper-module-imports" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-remap-async-to-generator" "^7.22.5" - -"@babel/plugin-transform-block-scoped-functions@^7.0.0", "@babel/plugin-transform-block-scoped-functions@^7.22.5": +"@babel/plugin-transform-block-scoped-functions@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.22.5.tgz#27978075bfaeb9fa586d3cb63a3d30c1de580024" integrity sha512-tdXZ2UdknEKQWKJP1KMNmuF5Lx3MymtMN/pvA+p/VEkhK8jVcQ1fzSy8KM9qRYhAf2/lV33hoMPKI/xaI9sADA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-block-scoping@^7.0.0", "@babel/plugin-transform-block-scoping@^7.22.5": +"@babel/plugin-transform-block-scoping@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.22.5.tgz#8bfc793b3a4b2742c0983fadc1480d843ecea31b" integrity sha512-EcACl1i5fSQ6bt+YGuU/XGCeZKStLmyVGytWkpyhCLeQVA0eu6Wtiw92V+I1T/hnezUv7j74dA/Ro69gWcU+hg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-class-properties@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.22.5.tgz#97a56e31ad8c9dc06a0b3710ce7803d5a48cca77" - integrity sha512-nDkQ0NfkOhPTq8YCLiWNxp1+f9fCobEjCb0n8WdbNUBc4IB5V7P1QnX9IjpSoquKrXF5SKojHleVNs2vGeHCHQ== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-class-static-block@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.22.5.tgz#3e40c46f048403472d6f4183116d5e46b1bff5ba" - integrity 
sha512-SPToJ5eYZLxlnp1UzdARpOGeC2GbHvr9d/UV0EukuVx8atktg194oe+C5BqQ8jRTkgLRVOPYeXRSBg1IlMoVRA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - -"@babel/plugin-transform-classes@^7.0.0", "@babel/plugin-transform-classes@^7.22.5": +"@babel/plugin-transform-classes@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.22.5.tgz#635d4e98da741fad814984639f4c0149eb0135e1" integrity sha512-2edQhLfibpWpsVBx2n/GKOz6JdGQvLruZQfGr9l1qes2KQaWswjBzhQF7UDUZMNaMMQeYnQzxwOMPsbYF7wqPQ== @@ -929,7 +673,7 @@ "@babel/helper-split-export-declaration" "^7.22.5" globals "^11.1.0" -"@babel/plugin-transform-computed-properties@^7.0.0", "@babel/plugin-transform-computed-properties@^7.22.5": +"@babel/plugin-transform-computed-properties@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.22.5.tgz#cd1e994bf9f316bd1c2dafcd02063ec261bb3869" integrity sha512-4GHWBgRf0krxPX+AaPtgBAlTgTeZmqDynokHOX7aqqAB4tHs3U2Y02zH6ETFdLZGcg9UQSD1WCmkVrE9ErHeOg== @@ -937,53 +681,14 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/template" "^7.22.5" -"@babel/plugin-transform-destructuring@^7.0.0", "@babel/plugin-transform-destructuring@^7.22.5": +"@babel/plugin-transform-destructuring@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.22.5.tgz#d3aca7438f6c26c78cdd0b0ba920a336001b27cc" integrity sha512-GfqcFuGW8vnEqTUBM7UtPd5A4q797LTvvwKxXTgRsFjoqaJiEg9deBG6kWeQYkVEL569NpnmpC0Pkr/8BLKGnQ== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-dotall-regex@^7.22.5", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.22.5" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.22.5.tgz#dbb4f0e45766eb544e193fb00e65a1dd3b2a4165" - integrity sha512-5/Yk9QxCQCl+sOIB1WelKnVRxTJDSAIxtJLL2/pqL14ZVlbH0fUQUZa/T5/UnQtBNgghR7mfB8ERBKyKPCi7Vw== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-duplicate-keys@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.22.5.tgz#b6e6428d9416f5f0bba19c70d1e6e7e0b88ab285" - integrity sha512-dEnYD+9BBgld5VBXHnF/DbYGp3fqGMsyxKbtD1mDyIA7AkTSpKXFhCVuj/oQVOoALfBs77DudA0BE4d5mcpmqw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-dynamic-import@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.22.5.tgz#d6908a8916a810468c4edff73b5b75bda6ad393e" - integrity sha512-0MC3ppTB1AMxd8fXjSrbPa7LT9hrImt+/fcj+Pg5YMD7UQyWp/02+JWpdnCymmsXwIx5Z+sYn1bwCn4ZJNvhqQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - -"@babel/plugin-transform-exponentiation-operator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.22.5.tgz#402432ad544a1f9a480da865fda26be653e48f6a" - integrity sha512-vIpJFNM/FjZ4rh1myqIya9jXwrwwgFRHPjT3DkUA9ZLHuzox8jiXkOLvwm1H+PQIP3CqfC++WPKeuDi0Sjdj1g== - dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-export-namespace-from@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.22.5.tgz#57c41cb1d0613d22f548fddd8b288eedb9973a5b" - 
integrity sha512-X4hhm7FRnPgd4nDA4b/5V280xCx6oL7Oob5+9qVS5C13Zq4bh1qq7LU0GgRU6b5dBWBvhGaXYVB4AcN6+ol6vg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-transform-flow-strip-types@^7.0.0", "@babel/plugin-transform-flow-strip-types@^7.16.0": +"@babel/plugin-transform-flow-strip-types@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.22.5.tgz#0bb17110c7bf5b35a60754b2f00c58302381dee2" integrity sha512-tujNbZdxdG0/54g/oua8ISToaXTFBf8EnSb5PgQSciIXWOWKX3S4+JR7ZE9ol8FZwf9kxitzkGQ+QWeov/mCiA== @@ -991,14 +696,14 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-flow" "^7.22.5" -"@babel/plugin-transform-for-of@^7.0.0", "@babel/plugin-transform-for-of@^7.22.5": +"@babel/plugin-transform-for-of@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.22.5.tgz#ab1b8a200a8f990137aff9a084f8de4099ab173f" integrity sha512-3kxQjX1dU9uudwSshyLeEipvrLjBCVthCgeTp6CzE/9JYrlAIaeekVxRpCWsDDfYTfRZRoCeZatCQvwo+wvK8A== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-function-name@^7.0.0", "@babel/plugin-transform-function-name@^7.22.5": +"@babel/plugin-transform-function-name@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.22.5.tgz#935189af68b01898e0d6d99658db6b164205c143" integrity sha512-UIzQNMS0p0HHiQm3oelztj+ECwFnj+ZRV4KnguvlsD2of1whUeM6o7wGNj6oLwcDoAXQ8gEqfgC24D+VdIcevg== @@ -1007,45 +712,21 @@ "@babel/helper-function-name" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-json-strings@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.22.5.tgz#14b64352fdf7e1f737eed68de1a1468bd2a77ec0" - integrity 
sha512-DuCRB7fu8MyTLbEQd1ew3R85nx/88yMoqo2uPSjevMj3yoN7CDM8jkgrY0wmVxfJZyJ/B9fE1iq7EQppWQmR5A== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-transform-literals@^7.0.0", "@babel/plugin-transform-literals@^7.22.5": +"@babel/plugin-transform-literals@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.22.5.tgz#e9341f4b5a167952576e23db8d435849b1dd7920" integrity sha512-fTLj4D79M+mepcw3dgFBTIDYpbcB9Sm0bpm4ppXPaO+U+PKFFyV9MGRvS0gvGw62sd10kT5lRMKXAADb9pWy8g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-logical-assignment-operators@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.22.5.tgz#66ae5f068fd5a9a5dc570df16f56c2a8462a9d6c" - integrity sha512-MQQOUW1KL8X0cDWfbwYP+TbVbZm16QmQXJQ+vndPtH/BoO0lOKpVoEDMI7+PskYxH+IiE0tS8xZye0qr1lGzSA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-transform-member-expression-literals@^7.0.0", "@babel/plugin-transform-member-expression-literals@^7.22.5": +"@babel/plugin-transform-member-expression-literals@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.22.5.tgz#4fcc9050eded981a468347dd374539ed3e058def" integrity sha512-RZEdkNtzzYCFl9SE9ATaUMTj2hqMb4StarOJLrZRbqqU4HSBE7UlBw9WBWQiDzrJZJdUWiMTVDI6Gv/8DPvfew== dependencies: "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-modules-amd@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.22.5.tgz#4e045f55dcf98afd00f85691a68fc0780704f526" - integrity 
sha512-R+PTfLTcYEmb1+kK7FNkhQ1gP4KgjpSO6HfH9+f8/yfp2Nt3ggBjiVpRwmwTlfqZLafYKJACy36yDXlEmI9HjQ== - dependencies: - "@babel/helper-module-transforms" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-modules-commonjs@^7.0.0", "@babel/plugin-transform-modules-commonjs@^7.22.5": +"@babel/plugin-transform-modules-commonjs@^7.0.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.22.5.tgz#7d9875908d19b8c0536085af7b053fd5bd651bfa" integrity sha512-B4pzOXj+ONRmuaQTg05b3y/4DuFz3WcCNAXPLb2Q0GT0TrGKGxNKV4jwsXts+StaM0LQczZbOpj8o1DLPDJIiA== @@ -1054,408 +735,90 @@ "@babel/helper-plugin-utils" "^7.22.5" "@babel/helper-simple-access" "^7.22.5" -"@babel/plugin-transform-modules-systemjs@^7.22.5": +"@babel/plugin-transform-object-super@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.22.5.tgz#18c31410b5e579a0092638f95c896c2a98a5d496" - integrity sha512-emtEpoaTMsOs6Tzz+nbmcePl6AKVtS1yC4YNAeMun9U8YCsgadPNxnOPQ8GhHFB2qdx+LZu9LgoC0Lthuu05DQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" + integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== dependencies: - "@babel/helper-hoist-variables" "^7.22.5" - "@babel/helper-module-transforms" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-identifier" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.5" -"@babel/plugin-transform-modules-umd@^7.22.5": +"@babel/plugin-transform-parameters@^7.0.0", "@babel/plugin-transform-parameters@^7.20.7": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.22.5.tgz#4694ae40a87b1745e3775b6a7fe96400315d4f98" - integrity 
sha512-+S6kzefN/E1vkSsKx8kmQuqeQsvCKCd1fraCM7zXm4SFoggI099Tr4G8U81+5gtMdUeMQ4ipdQffbKLX0/7dBQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.5.tgz#c3542dd3c39b42c8069936e48717a8d179d63a18" + integrity sha512-AVkFUBurORBREOmHRKo06FjHYgjrabpdqRSwq6+C7R5iTCZOsM4QbcB27St0a4U6fffyAOqh3s/qEfybAhfivg== dependencies: - "@babel/helper-module-transforms" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-named-capturing-groups-regex@^7.22.5": +"@babel/plugin-transform-property-literals@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" - integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" + integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-new-target@^7.22.5": +"@babel/plugin-transform-react-display-name@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.22.5.tgz#1b248acea54ce44ea06dfd37247ba089fcf9758d" - integrity sha512-AsF7K0Fx/cNKVyk3a+DW0JLo+Ua598/NxMRvxDnkpCIGFh43+h/v2xyhRUYf6oD8gE4QtL83C7zZVghMjHd+iw== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" + integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== dependencies: 
"@babel/helper-plugin-utils" "^7.22.5" -"@babel/plugin-transform-nullish-coalescing-operator@^7.22.5": +"@babel/plugin-transform-react-jsx-self@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.22.5.tgz#f8872c65776e0b552e0849d7596cddd416c3e381" - integrity sha512-6CF8g6z1dNYZ/VXok5uYkkBBICHZPiGEl7oDnAx2Mt1hlHVHOSIKWJaXHjQJA5VB43KZnXZDIexMchY4y2PGdA== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.22.5.tgz#ca2fdc11bc20d4d46de01137318b13d04e481d8e" + integrity sha512-nTh2ogNUtxbiSbxaT4Ds6aXnXEipHweN9YRgOX/oNXdf0cCrGn/+2LozFa3lnPV5D90MkjhgckCPBrsoSc1a7g== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" -"@babel/plugin-transform-numeric-separator@^7.22.5": +"@babel/plugin-transform-react-jsx-source@^7.22.5": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.22.5.tgz#57226a2ed9e512b9b446517ab6fa2d17abb83f58" - integrity sha512-NbslED1/6M+sXiwwtcAB/nieypGw02Ejf4KtDeMkCEpP6gWFMX1wI9WKYua+4oBneCCEmulOkRpwywypVZzs/g== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.22.5.tgz#49af1615bfdf6ed9d3e9e43e425e0b2b65d15b6c" + integrity sha512-yIiRO6yobeEIaI0RTbIr8iAK9FcBHLtZq0S89ZPjDLQXBA4xvghaKqI0etp/tF3htTM0sazJKKLz9oEiGRtu7w== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" -"@babel/plugin-transform-object-rest-spread@^7.22.5": +"@babel/plugin-transform-react-jsx@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.22.5.tgz#9686dc3447df4753b0b2a2fae7e8bc33cdc1f2e1" - integrity 
sha512-Kk3lyDmEslH9DnvCDA1s1kkd3YWQITiBOHngOtDL9Pt6BZjzqb6hiOlb8VfjiiQJ2unmegBqZu0rx5RxJb5vmQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416" + integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA== dependencies: - "@babel/compat-data" "^7.22.5" - "@babel/helper-compilation-targets" "^7.22.5" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-module-imports" "^7.22.5" "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.22.5" + "@babel/types" "^7.22.5" -"@babel/plugin-transform-object-super@^7.0.0", "@babel/plugin-transform-object-super@^7.22.5": +"@babel/plugin-transform-shorthand-properties@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.22.5.tgz#794a8d2fcb5d0835af722173c1a9d704f44e218c" - integrity sha512-klXqyaT9trSjIUrcsYIfETAzmOEZL3cBYqOYLJxBHfMFFggmXOv+NYSX/Jbs9mzMVESw/WycLFPRx8ba/b2Ipw== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" + integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-replace-supers" "^7.22.5" -"@babel/plugin-transform-optional-catch-binding@^7.22.5": +"@babel/plugin-transform-spread@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.22.5.tgz#842080be3076703be0eaf32ead6ac8174edee333" - integrity sha512-pH8orJahy+hzZje5b8e2QIlBWQvGpelS76C63Z+jhZKsmzfNaPQ+LaW6dcJ9bxTpo1mtXbgHwy765Ro3jftmUg== + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" + integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" -"@babel/plugin-transform-optional-chaining@^7.22.5": +"@babel/plugin-transform-template-literals@^7.0.0": version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.22.5.tgz#1003762b9c14295501beb41be72426736bedd1e0" - integrity sha512-AconbMKOMkyG+xCng2JogMCDcqW8wedQAqpVIL4cOSescZ7+iW8utC6YDZLMCSUIReEA733gzRSaOSXMAt/4WQ== + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" + integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== dependencies: "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" -"@babel/plugin-transform-parameters@^7.0.0", "@babel/plugin-transform-parameters@^7.20.7", "@babel/plugin-transform-parameters@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.22.5.tgz#c3542dd3c39b42c8069936e48717a8d179d63a18" - integrity sha512-AVkFUBurORBREOmHRKo06FjHYgjrabpdqRSwq6+C7R5iTCZOsM4QbcB27St0a4U6fffyAOqh3s/qEfybAhfivg== +"@babel/runtime@7.13.10": + version "7.13.10" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.10.tgz#47d42a57b6095f4468da440388fdbad8bebf0d7d" + integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== dependencies: - 
"@babel/helper-plugin-utils" "^7.22.5" + regenerator-runtime "^0.13.4" -"@babel/plugin-transform-private-methods@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.22.5.tgz#21c8af791f76674420a147ae62e9935d790f8722" - integrity sha512-PPjh4gyrQnGe97JTalgRGMuU4icsZFnWkzicB/fUtzlKUqvsWBKEpPPfr5a2JiyirZkHxnAqkQMO5Z5B2kK3fA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-private-property-in-object@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.22.5.tgz#07a77f28cbb251546a43d175a1dda4cf3ef83e32" - integrity sha512-/9xnaTTJcVoBtSSmrVyhtSvO3kbqS2ODoh2juEU72c3aYonNF0OMGiaz2gjukyKM2wBBYJP38S4JiE0Wfb5VMQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-transform-property-literals@^7.0.0", "@babel/plugin-transform-property-literals@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.22.5.tgz#b5ddabd73a4f7f26cd0e20f5db48290b88732766" - integrity sha512-TiOArgddK3mK/x1Qwf5hay2pxI6wCZnvQqrFSqbtg1GLl2JcNMitVH/YnqjP+M31pLUeTfzY1HAXFDnUBV30rQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-react-constant-elements@^7.12.1": - version "7.13.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.13.13.tgz#0208b1d942bf939cd4f7aa5b255d42602aa4a920" - integrity sha512-SNJU53VM/SjQL0bZhyU+f4kJQz7bQQajnrZRSaU21hruG/NWY41AEM9AWXeXX90pYr/C2yAmTgI6yW3LlLrAUQ== - dependencies: - 
"@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-transform-react-display-name@^7.0.0", "@babel/plugin-transform-react-display-name@^7.16.0", "@babel/plugin-transform-react-display-name@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.22.5.tgz#3c4326f9fce31c7968d6cb9debcaf32d9e279a2b" - integrity sha512-PVk3WPYudRF5z4GKMEYUrLjPl38fJSKNaEOkFuoprioowGuWN6w2RKznuFNSlJx7pzzXXStPUnNSOEO0jL5EVw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-react-jsx-development@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.22.5.tgz#e716b6edbef972a92165cd69d92f1255f7e73e87" - integrity sha512-bDhuzwWMuInwCYeDeMzyi7TaBgRQei6DqxhbyniL7/VG4RSS7HtSL2QbY4eESy1KJqlWt8g3xeEBGPuo+XqC8A== - dependencies: - "@babel/plugin-transform-react-jsx" "^7.22.5" - -"@babel/plugin-transform-react-jsx@^7.0.0", "@babel/plugin-transform-react-jsx@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.22.5.tgz#932c291eb6dd1153359e2a90cb5e557dcf068416" - integrity sha512-rog5gZaVbUip5iWDMTYbVM15XQq+RkUKhET/IHR6oizR+JEoN6CAfTTuHcK4vwUyzca30qqHqEpzBOnaRMWYMA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-module-imports" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-jsx" "^7.22.5" - "@babel/types" "^7.22.5" - -"@babel/plugin-transform-react-pure-annotations@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.22.5.tgz#1f58363eef6626d6fa517b95ac66fe94685e32c0" - integrity sha512-gP4k85wx09q+brArVinTXhWiyzLl9UpmGva0+mWyKxk6JZequ05x3eUcIUE+FyttPKJFRRVtAvQaJ6YF9h1ZpA== - dependencies: - 
"@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-regenerator@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.22.5.tgz#cd8a68b228a5f75fa01420e8cc2fc400f0fc32aa" - integrity sha512-rR7KePOE7gfEtNTh9Qw+iO3Q/e4DEsoQ+hdvM6QUDH7JRJ5qxq5AA52ZzBWbI5i9lfNuvySgOGP8ZN7LAmaiPw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - regenerator-transform "^0.15.1" - -"@babel/plugin-transform-reserved-words@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.22.5.tgz#832cd35b81c287c4bcd09ce03e22199641f964fb" - integrity sha512-DTtGKFRQUDm8svigJzZHzb/2xatPc6TzNvAIJ5GqOKDsGFYgAskjRulbR/vGsPKq3OPqtexnz327qYpP57RFyA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-runtime@^7.16.4": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.22.5.tgz#ca975fb5e260044473c8142e1b18b567d33c2a3b" - integrity sha512-bg4Wxd1FWeFx3daHFTWk1pkSWK/AyQuiyAoeZAOkAOUBjnZPH6KT7eMxouV47tQ6hl6ax2zyAWBdWZXbrvXlaw== - dependencies: - "@babel/helper-module-imports" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - babel-plugin-polyfill-corejs2 "^0.4.3" - babel-plugin-polyfill-corejs3 "^0.8.1" - babel-plugin-polyfill-regenerator "^0.5.0" - semver "^6.3.0" - -"@babel/plugin-transform-shorthand-properties@^7.0.0", "@babel/plugin-transform-shorthand-properties@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.22.5.tgz#6e277654be82b5559fc4b9f58088507c24f0c624" - integrity sha512-vM4fq9IXHscXVKzDv5itkO1X52SmdFBFcMIBZ2FRn2nqVYqw6dBexUgMvAjHW+KXpPPViD/Yo3GrDEBaRC0QYA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - 
-"@babel/plugin-transform-spread@^7.0.0", "@babel/plugin-transform-spread@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.22.5.tgz#6487fd29f229c95e284ba6c98d65eafb893fea6b" - integrity sha512-5ZzDQIGyvN4w8+dMmpohL6MBo+l2G7tfC/O2Dg7/hjpgeWvUx8FzfeOKxGog9IimPa4YekaQ9PlDqTLOljkcxg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" - -"@babel/plugin-transform-sticky-regex@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.22.5.tgz#295aba1595bfc8197abd02eae5fc288c0deb26aa" - integrity sha512-zf7LuNpHG0iEeiyCNwX4j3gDg1jgt1k3ZdXBKbZSoA3BbGQGvMiSvfbZRR3Dr3aeJe3ooWFZxOOG3IRStYp2Bw== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-template-literals@^7.0.0", "@babel/plugin-transform-template-literals@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.22.5.tgz#8f38cf291e5f7a8e60e9f733193f0bcc10909bff" - integrity sha512-5ciOehRNf+EyUeewo8NkbQiUs4d6ZxiHo6BcBcnFlgiJfu16q0bQUw9Jvo0b0gBKFG1SMhDSjeKXSYuJLeFSMA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-typeof-symbol@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.22.5.tgz#5e2ba478da4b603af8673ff7c54f75a97b716b34" - integrity sha512-bYkI5lMzL4kPii4HHEEChkD0rkc+nvnlR6+o/qdqR6zrm0Sv/nodmyLhlq2DO0YKLUNd2VePmPRjJXSBh9OIdA== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-typescript@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.22.5.tgz#5c0f7adfc1b5f38c4dbc8f79b1f0f8074134bd7d" - integrity 
sha512-SMubA9S7Cb5sGSFFUlqxyClTA9zWJ8qGQrppNUm05LtFuN1ELRFNndkix4zUJrC9F+YivWwa1dHMSyo0e0N9dA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.22.5" - "@babel/helper-create-class-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/plugin-syntax-typescript" "^7.22.5" - -"@babel/plugin-transform-unicode-escapes@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.22.5.tgz#ce0c248522b1cb22c7c992d88301a5ead70e806c" - integrity sha512-biEmVg1IYB/raUO5wT1tgfacCef15Fbzhkx493D3urBI++6hpJ+RFG4SrWMn0NEZLfvilqKf3QDrRVZHo08FYg== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-unicode-property-regex@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.22.5.tgz#098898f74d5c1e86660dc112057b2d11227f1c81" - integrity sha512-HCCIb+CbJIAE6sXn5CjFQXMwkCClcOfPCzTlilJ8cUatfzwHlWQkbtV0zD338u9dZskwvuOYTuuaMaA8J5EI5A== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-unicode-regex@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.22.5.tgz#ce7e7bb3ef208c4ff67e02a22816656256d7a183" - integrity sha512-028laaOKptN5vHJf9/Arr/HiJekMd41hOEZYvNsrsXqJ7YPYuX2bQxh31fkZzGmq3YqHRJzYFFAVYvKfMPKqyg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/plugin-transform-unicode-sets-regex@^7.22.5": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.22.5.tgz#77788060e511b708ffc7d42fdfbc5b37c3004e91" - integrity 
sha512-lhMfi4FC15j13eKrh3DnYHjpGj6UKQHtNKTbtc1igvAhRy4+kLhV07OpLcsN0VgDEw/MjAvJO4BdMJsHwMhzCg== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - -"@babel/preset-env@^7.11.0", "@babel/preset-env@^7.12.1", "@babel/preset-env@^7.16.4": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.22.5.tgz#3da66078b181f3d62512c51cf7014392c511504e" - integrity sha512-fj06hw89dpiZzGZtxn+QybifF07nNiZjZ7sazs2aVDcysAZVGjW7+7iFYxg6GLNM47R/thYfLdrXc+2f11Vi9A== - dependencies: - "@babel/compat-data" "^7.22.5" - "@babel/helper-compilation-targets" "^7.22.5" - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-option" "^7.22.5" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.22.5" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.22.5" - "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.22.5" - "@babel/plugin-syntax-import-attributes" "^7.22.5" - "@babel/plugin-syntax-import-meta" "^7.10.4" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" - 
"@babel/plugin-transform-arrow-functions" "^7.22.5" - "@babel/plugin-transform-async-generator-functions" "^7.22.5" - "@babel/plugin-transform-async-to-generator" "^7.22.5" - "@babel/plugin-transform-block-scoped-functions" "^7.22.5" - "@babel/plugin-transform-block-scoping" "^7.22.5" - "@babel/plugin-transform-class-properties" "^7.22.5" - "@babel/plugin-transform-class-static-block" "^7.22.5" - "@babel/plugin-transform-classes" "^7.22.5" - "@babel/plugin-transform-computed-properties" "^7.22.5" - "@babel/plugin-transform-destructuring" "^7.22.5" - "@babel/plugin-transform-dotall-regex" "^7.22.5" - "@babel/plugin-transform-duplicate-keys" "^7.22.5" - "@babel/plugin-transform-dynamic-import" "^7.22.5" - "@babel/plugin-transform-exponentiation-operator" "^7.22.5" - "@babel/plugin-transform-export-namespace-from" "^7.22.5" - "@babel/plugin-transform-for-of" "^7.22.5" - "@babel/plugin-transform-function-name" "^7.22.5" - "@babel/plugin-transform-json-strings" "^7.22.5" - "@babel/plugin-transform-literals" "^7.22.5" - "@babel/plugin-transform-logical-assignment-operators" "^7.22.5" - "@babel/plugin-transform-member-expression-literals" "^7.22.5" - "@babel/plugin-transform-modules-amd" "^7.22.5" - "@babel/plugin-transform-modules-commonjs" "^7.22.5" - "@babel/plugin-transform-modules-systemjs" "^7.22.5" - "@babel/plugin-transform-modules-umd" "^7.22.5" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.22.5" - "@babel/plugin-transform-new-target" "^7.22.5" - "@babel/plugin-transform-nullish-coalescing-operator" "^7.22.5" - "@babel/plugin-transform-numeric-separator" "^7.22.5" - "@babel/plugin-transform-object-rest-spread" "^7.22.5" - "@babel/plugin-transform-object-super" "^7.22.5" - "@babel/plugin-transform-optional-catch-binding" "^7.22.5" - "@babel/plugin-transform-optional-chaining" "^7.22.5" - "@babel/plugin-transform-parameters" "^7.22.5" - "@babel/plugin-transform-private-methods" "^7.22.5" - "@babel/plugin-transform-private-property-in-object" 
"^7.22.5" - "@babel/plugin-transform-property-literals" "^7.22.5" - "@babel/plugin-transform-regenerator" "^7.22.5" - "@babel/plugin-transform-reserved-words" "^7.22.5" - "@babel/plugin-transform-shorthand-properties" "^7.22.5" - "@babel/plugin-transform-spread" "^7.22.5" - "@babel/plugin-transform-sticky-regex" "^7.22.5" - "@babel/plugin-transform-template-literals" "^7.22.5" - "@babel/plugin-transform-typeof-symbol" "^7.22.5" - "@babel/plugin-transform-unicode-escapes" "^7.22.5" - "@babel/plugin-transform-unicode-property-regex" "^7.22.5" - "@babel/plugin-transform-unicode-regex" "^7.22.5" - "@babel/plugin-transform-unicode-sets-regex" "^7.22.5" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.22.5" - babel-plugin-polyfill-corejs2 "^0.4.3" - babel-plugin-polyfill-corejs3 "^0.8.1" - babel-plugin-polyfill-regenerator "^0.5.0" - core-js-compat "^3.30.2" - semver "^6.3.0" - -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.5.tgz#ef939d6e7f268827e1841638dc6ff95515e115d9" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" - "@babel/types" "^7.4.4" - esutils "^2.0.2" - -"@babel/preset-react@^7.12.5", "@babel/preset-react@^7.16.0": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.22.5.tgz#c4d6058fbf80bccad02dd8c313a9aaa67e3c3dd6" - integrity sha512-M+Is3WikOpEJHgR385HbuCITPTaPRaNkibTEa9oiofmJvIsrceb4yp9RL9Kb+TE8LznmeyZqpP+Lopwcx59xPQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-option" "^7.22.5" - "@babel/plugin-transform-react-display-name" "^7.22.5" - "@babel/plugin-transform-react-jsx" "^7.22.5" - "@babel/plugin-transform-react-jsx-development" "^7.22.5" - 
"@babel/plugin-transform-react-pure-annotations" "^7.22.5" - -"@babel/preset-typescript@^7.16.0": - version "7.22.5" - resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.22.5.tgz#16367d8b01d640e9a507577ed4ee54e0101e51c8" - integrity sha512-YbPaal9LxztSGhmndR46FmAbkJ/1fAsw293tSU+I5E5h+cnJ3d4GTwyUgGYmOXJYdGA+uNePle4qbaRzj2NISQ== - dependencies: - "@babel/helper-plugin-utils" "^7.22.5" - "@babel/helper-validator-option" "^7.22.5" - "@babel/plugin-syntax-jsx" "^7.22.5" - "@babel/plugin-transform-modules-commonjs" "^7.22.5" - "@babel/plugin-transform-typescript" "^7.22.5" - -"@babel/regjsgen@^0.8.0": - version "0.8.0" - resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" - integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== - -"@babel/runtime-corejs3@^7.10.2": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.14.0.tgz#6bf5fbc0b961f8e3202888cb2cd0fb7a0a9a3f66" - integrity sha512-0R0HTZWHLk6G8jIk0FtoX+AatCtKnswS98VhXwGImFc759PJRp4Tru0PQYZofyijTFUr+gT8Mu7sgXVJLQ0ceg== - dependencies: - core-js-pure "^3.0.0" - regenerator-runtime "^0.13.4" - -"@babel/runtime@7.13.10": - version "7.13.10" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.10.tgz#47d42a57b6095f4468da440388fdbad8bebf0d7d" - integrity sha512-4QPkjJq6Ns3V/RgpEahRk+AGfL0eO6RHHtTWoNNr5mO49G6B5+X6d6THgWEAvTrznU5xYpbAlVKRYcsCgh/Akw== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.0", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", 
"@babel/runtime@^7.20.0", "@babel/runtime@^7.20.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.0", "@babel/runtime@^7.16.7", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.1", "@babel/runtime@^7.20.7", "@babel/runtime@^7.3.1", "@babel/runtime@^7.5.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.5.tgz#8564dd588182ce0047d55d7a75e93921107b57ec" integrity sha512-ecjvYlnAaZ/KVneE/OdKYBYfgXV3Ptu6zQWmgEF7vwKhQnvVS6bjMD2XYgj+SNvQ1GfK/pjgokfPkC/2CO8CuA== @@ -1471,7 +834,7 @@ "@babel/parser" "^7.22.15" "@babel/types" "^7.22.15" -"@babel/template@^7.22.5", "@babel/template@^7.3.3", "@babel/template@^7.4.4": +"@babel/template@^7.22.5", "@babel/template@^7.4.4": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" integrity sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw== @@ -1480,7 +843,7 @@ "@babel/parser" "^7.22.5" "@babel/types" "^7.22.5" -"@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.22.5", "@babel/traverse@^7.23.5", "@babel/traverse@^7.4.5", "@babel/traverse@^7.7.2": +"@babel/traverse@>=7.23.2", "@babel/traverse@^7.1.6", "@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.22.5", "@babel/traverse@^7.23.2", "@babel/traverse@^7.23.5", 
"@babel/traverse@^7.4.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" integrity sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w== @@ -1496,7 +859,7 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.12.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": +"@babel/types@^7.0.0", "@babel/types@^7.1.6", "@babel/types@^7.20.7", "@babel/types@^7.22.5", "@babel/types@^7.3.0": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.22.5.tgz#cd93eeaab025880a3a47ec881f4b096a5b786fbe" integrity sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA== @@ -1514,141 +877,14 @@ "@babel/helper-validator-identifier" "^7.22.20" to-fast-properties "^2.0.0" -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@craco/craco@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@craco/craco/-/craco-7.1.0.tgz#12bd394c7f0334e214302e4d35a1768f68042fbb" - integrity sha512-oRAcPIKYrfPXp9rSzlsDNeOaVtDiKhoyqSXUoqiK24jCkHr4T8m/a2f74yXIzCbIheoUWDOIfWZyRgFgT+cpqA== - dependencies: - autoprefixer "^10.4.12" - cosmiconfig "^7.0.1" - cosmiconfig-typescript-loader "^1.0.0" - cross-spawn "^7.0.3" - lodash "^4.17.21" - semver "^7.3.7" - webpack-merge "^5.8.0" - -"@cspotcode/source-map-support@^0.8.0": - version "0.8.1" - resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" - integrity 
sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== - dependencies: - "@jridgewell/trace-mapping" "0.3.9" - -"@csstools/normalize.css@*": - version "12.0.0" - resolved "https://registry.yarnpkg.com/@csstools/normalize.css/-/normalize.css-12.0.0.tgz#a9583a75c3f150667771f30b60d9f059473e62c4" - integrity sha512-M0qqxAcwCsIVfpFQSlGN5XjXWu8l5JDZN+fPt1LeW5SZexQTgnaEvgXAY+CeygRw0EeppWHi12JxESWiWrB0Sg== - -"@csstools/postcss-cascade-layers@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-cascade-layers/-/postcss-cascade-layers-1.1.1.tgz#8a997edf97d34071dd2e37ea6022447dd9e795ad" - integrity sha512-+KdYrpKC5TgomQr2DlZF4lDEpHcoxnj5IGddYYfBWJAKfj1JtuHUIqMa+E1pJJ+z3kvDViWMqyqPlG4Ja7amQA== - dependencies: - "@csstools/selector-specificity" "^2.0.2" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-color-function@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-color-function/-/postcss-color-function-1.1.1.tgz#2bd36ab34f82d0497cfacdc9b18d34b5e6f64b6b" - integrity sha512-Bc0f62WmHdtRDjf5f3e2STwRAl89N2CLb+9iAwzrv4L2hncrbDwnQD9PCq0gtAt7pOI2leIV08HIBUd4jxD8cw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-font-format-keywords@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-font-format-keywords/-/postcss-font-format-keywords-1.0.1.tgz#677b34e9e88ae997a67283311657973150e8b16a" - integrity sha512-ZgrlzuUAjXIOc2JueK0X5sZDjCtgimVp/O5CEqTcs5ShWBa6smhWYbS0x5cVc/+rycTDbjjzoP0KTDnUneZGOg== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-hwb-function@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@csstools/postcss-hwb-function/-/postcss-hwb-function-1.0.2.tgz#ab54a9fce0ac102c754854769962f2422ae8aa8b" - integrity sha512-YHdEru4o3Rsbjmu6vHy4UKOXZD+Rn2zmkAmLRfPet6+Jz4Ojw8cbWxe1n42VaXQhD3CQUXXTooIy8OkVbUcL+w== 
- dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-ic-unit@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-ic-unit/-/postcss-ic-unit-1.0.1.tgz#28237d812a124d1a16a5acc5c3832b040b303e58" - integrity sha512-Ot1rcwRAaRHNKC9tAqoqNZhjdYBzKk1POgWfhN4uCOE47ebGcLRqXjKkApVDpjifL6u2/55ekkpnFcp+s/OZUw== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-is-pseudo-class@^2.0.7": - version "2.0.7" - resolved "https://registry.yarnpkg.com/@csstools/postcss-is-pseudo-class/-/postcss-is-pseudo-class-2.0.7.tgz#846ae6c0d5a1eaa878fce352c544f9c295509cd1" - integrity sha512-7JPeVVZHd+jxYdULl87lvjgvWldYu+Bc62s9vD/ED6/QTGjy0jy0US/f6BG53sVMTBJ1lzKZFpYmofBN9eaRiA== - dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -"@csstools/postcss-nested-calc@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-nested-calc/-/postcss-nested-calc-1.0.0.tgz#d7e9d1d0d3d15cf5ac891b16028af2a1044d0c26" - integrity sha512-JCsQsw1wjYwv1bJmgjKSoZNvf7R6+wuHDAbi5f/7MbFhl2d/+v+TvBTU4BJH3G1X1H87dHl0mh6TfYogbT/dJQ== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-normalize-display-values@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-normalize-display-values/-/postcss-normalize-display-values-1.0.1.tgz#15da54a36e867b3ac5163ee12c1d7f82d4d612c3" - integrity sha512-jcOanIbv55OFKQ3sYeFD/T0Ti7AMXc9nM1hZWu8m/2722gOTxFg7xYu4RDLJLeZmPUVQlGzo4jhzvTUq3x4ZUw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-oklab-function@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-oklab-function/-/postcss-oklab-function-1.1.1.tgz#88cee0fbc8d6df27079ebd2fa016ee261eecf844" - integrity sha512-nJpJgsdA3dA9y5pgyb/UfEzE7W5Ka7u0CX0/HIMVBNWzWemdcTH3XwANECU6anWv/ao4vVNLTMxhiPNZsTK6iA== - dependencies: - 
"@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -"@csstools/postcss-progressive-custom-properties@^1.1.0", "@csstools/postcss-progressive-custom-properties@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-progressive-custom-properties/-/postcss-progressive-custom-properties-1.3.0.tgz#542292558384361776b45c85226b9a3a34f276fa" - integrity sha512-ASA9W1aIy5ygskZYuWams4BzafD12ULvSypmaLJT2jvQ8G0M3I8PRQhC0h7mG0Z3LI05+agZjqSR9+K9yaQQjA== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-stepped-value-functions@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@csstools/postcss-stepped-value-functions/-/postcss-stepped-value-functions-1.0.1.tgz#f8772c3681cc2befed695e2b0b1d68e22f08c4f4" - integrity sha512-dz0LNoo3ijpTOQqEJLY8nyaapl6umbmDcgj4AD0lgVQ572b2eqA1iGZYTTWhrcrHztWDDRAX2DGYyw2VBjvCvQ== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-text-decoration-shorthand@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@csstools/postcss-text-decoration-shorthand/-/postcss-text-decoration-shorthand-1.0.0.tgz#ea96cfbc87d921eca914d3ad29340d9bcc4c953f" - integrity sha512-c1XwKJ2eMIWrzQenN0XbcfzckOLLJiczqy+YvfGmzoVXd7pT9FfObiSEfzs84bpE/VqfpEuAZ9tCRbZkZxxbdw== - dependencies: - postcss-value-parser "^4.2.0" - -"@csstools/postcss-trigonometric-functions@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@csstools/postcss-trigonometric-functions/-/postcss-trigonometric-functions-1.0.2.tgz#94d3e4774c36d35dcdc88ce091336cb770d32756" - integrity sha512-woKaLO///4bb+zZC2s80l+7cm07M7268MsyG3M0ActXXEFi6SuhvriQYcb58iiKGbjwwIU7n45iRLEHypB47Og== +"@babel/types@^7.21.3": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd" + integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg== dependencies: - 
postcss-value-parser "^4.2.0" - -"@csstools/postcss-unset-value@^1.0.2": - version "1.0.2" - resolved "https://registry.yarnpkg.com/@csstools/postcss-unset-value/-/postcss-unset-value-1.0.2.tgz#c99bb70e2cdc7312948d1eb41df2412330b81f77" - integrity sha512-c8J4roPBILnelAsdLr4XOAR/GsTm0GJi4XpcfvoWk3U6KiTCqiFYc63KhRMQQX35jYMp4Ao8Ij9+IZRgMfJp1g== - -"@csstools/selector-specificity@^2.0.0", "@csstools/selector-specificity@^2.0.2": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-2.2.0.tgz#2cbcf822bf3764c9658c4d2e568bd0c0cb748016" - integrity sha512-+OJ9konv95ClSTOJCmMZqpd5+YGsB2S+x6w3E1oaM8UuR5j8nTNHYSz8c9BEPGDOCMQYIEEGlVPj/VY64iTbGw== + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" "@ctrl/tinycolor@^3.3.1", "@ctrl/tinycolor@^3.4.0": version "3.4.0" @@ -1796,7 +1032,117 @@ resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.3.0.tgz#ea89004119dc42db2e1dba0f97d553f7372f6fcb" integrity sha512-AHPmaAx+RYfZz0eYu6Gviiagpmiyw98ySSlQvCUhVGDRtDFe4DBS0x1bSjdF3gqUDYOczB+yYvBTtEylYSdRhg== -"@eslint-community/eslint-utils@^4.2.0": +"@esbuild/android-arm64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz#984b4f9c8d0377443cc2dfcef266d02244593622" + integrity sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ== + +"@esbuild/android-arm@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.18.20.tgz#fedb265bc3a589c84cc11f810804f234947c3682" + integrity sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw== + +"@esbuild/android-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.18.20.tgz#35cf419c4cfc8babe8893d296cd990e9e9f756f2" + integrity 
sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg== + +"@esbuild/darwin-arm64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1" + integrity sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA== + +"@esbuild/darwin-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz#d70d5790d8bf475556b67d0f8b7c5bdff053d85d" + integrity sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ== + +"@esbuild/freebsd-arm64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz#98755cd12707f93f210e2494d6a4b51b96977f54" + integrity sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw== + +"@esbuild/freebsd-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz#c1eb2bff03915f87c29cece4c1a7fa1f423b066e" + integrity sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ== + +"@esbuild/linux-arm64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz#bad4238bd8f4fc25b5a021280c770ab5fc3a02a0" + integrity sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA== + +"@esbuild/linux-arm@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz#3e617c61f33508a27150ee417543c8ab5acc73b0" + integrity sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg== + +"@esbuild/linux-ia32@0.18.20": + version "0.18.20" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz#699391cccba9aee6019b7f9892eb99219f1570a7" + integrity sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA== + +"@esbuild/linux-loong64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz#e6fccb7aac178dd2ffb9860465ac89d7f23b977d" + integrity sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg== + +"@esbuild/linux-mips64el@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz#eeff3a937de9c2310de30622a957ad1bd9183231" + integrity sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ== + +"@esbuild/linux-ppc64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz#2f7156bde20b01527993e6881435ad79ba9599fb" + integrity sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA== + +"@esbuild/linux-riscv64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz#6628389f210123d8b4743045af8caa7d4ddfc7a6" + integrity sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A== + +"@esbuild/linux-s390x@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz#255e81fb289b101026131858ab99fba63dcf0071" + integrity sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ== + +"@esbuild/linux-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz#c7690b3417af318a9b6f96df3031a8865176d338" + integrity sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w== + 
+"@esbuild/netbsd-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz#30e8cd8a3dded63975e2df2438ca109601ebe0d1" + integrity sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A== + +"@esbuild/openbsd-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz#7812af31b205055874c8082ea9cf9ab0da6217ae" + integrity sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg== + +"@esbuild/sunos-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz#d5c275c3b4e73c9b0ecd38d1ca62c020f887ab9d" + integrity sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ== + +"@esbuild/win32-arm64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz#73bc7f5a9f8a77805f357fab97f290d0e4820ac9" + integrity sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg== + +"@esbuild/win32-ia32@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz#ec93cbf0ef1085cc12e71e0d661d20569ff42102" + integrity sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g== + +"@esbuild/win32-x64@0.18.20": + version "0.18.20" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d" + integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ== + +"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" integrity 
sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== @@ -2279,18 +1625,6 @@ resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950" integrity sha512-wYn6r8zVZyQJ6rQaALBEln5B1pzxb9shV5Ef97kTvn6yVGrqyXVnDqnU24MXnFubR+rZjBY9NWuxX3FB2sTsjg== -"@hapi/hoek@^9.0.0": - version "9.2.0" - resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" - integrity sha512-sqKVVVOe5ivCaXDWivIJYVSaEgdQK9ul7a4Kity5Iw7u9+wBAPbX1RMSnLLmp7O4Vzj0WOWwMAJsTL00xwaNug== - -"@hapi/topo@^5.0.0": - version "5.1.0" - resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012" - integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg== - dependencies: - "@hapi/hoek" "^9.0.0" - "@humanwhocodes/config-array@^0.11.10": version "0.11.10" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.10.tgz#5a3ffe32cc9306365fb3fd572596cd602d5e12d2" @@ -2315,305 +1649,54 @@ resolved "https://registry.yarnpkg.com/@icons/material/-/material-0.2.4.tgz#e90c9f71768b3736e76d7dd6783fc6c2afa88bc8" integrity sha512-QPcGmICAPbGLGb6F/yNf/KzKqvFx8z5qx3D1yFqVAjoFmXK35EgyW+cJ57Te3CNsmzblwtzakLGFqHPqrfb4Tw== -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== +"@jest/schemas@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - 
get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + "@sinclair/typebox" "^0.27.8" -"@jest/console@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-27.5.1.tgz#260fe7239602fe5130a94f1aa386eff54b014bba" - integrity sha512-kZ/tNpS3NXn0mlXXXPNuDZnb4c0oZ20r4K5eemM2k30ZC3G0T02nXUvyhf5YdbXWHPEJLc9qGLxEZ216MdL+Zg== +"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - slash "^3.0.0" + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" -"@jest/console@^28.1.3": - version "28.1.3" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" - integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== - dependencies: - "@jest/types" "^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^28.1.3" - jest-util "^28.1.3" - slash "^3.0.0" +"@jridgewell/resolve-uri@3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== -"@jest/core@^27.5.1": - version "27.5.1" - 
resolved "https://registry.yarnpkg.com/@jest/core/-/core-27.5.1.tgz#267ac5f704e09dc52de2922cbf3af9edcd64b626" - integrity sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/reporters" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.8.1" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files "^27.5.1" - jest-config "^27.5.1" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-resolve-dependencies "^27.5.1" - jest-runner "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - jest-watcher "^27.5.1" - micromatch "^4.0.4" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" +"@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== -"@jest/environment@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-27.5.1.tgz#d7425820511fe7158abbecc010140c3fd3be9c74" - integrity sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== - dependencies: - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" +"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== -"@jest/fake-timers@^27.5.1": - version 
"27.5.1" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-27.5.1.tgz#76979745ce0579c8a94a4678af7a748eda8ada74" - integrity sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== - dependencies: - "@jest/types" "^27.5.1" - "@sinonjs/fake-timers" "^8.0.1" - "@types/node" "*" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-util "^27.5.1" +"@jridgewell/sourcemap-codec@^1.4.15": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== -"@jest/globals@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-27.5.1.tgz#7ac06ce57ab966566c7963431cef458434601b2b" - integrity sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== +"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.18" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== dependencies: - "@jest/environment" "^27.5.1" - "@jest/types" "^27.5.1" - expect "^27.5.1" + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" -"@jest/reporters@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-27.5.1.tgz#ceda7be96170b03c923c37987b64015812ffec04" - integrity sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage 
"^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-haste-map "^27.5.1" - jest-resolve "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^4.0.1" - terminal-link "^2.0.0" - v8-to-istanbul "^8.1.0" - -"@jest/schemas@^28.1.3": - version "28.1.3" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" - integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== - dependencies: - "@sinclair/typebox" "^0.24.1" - -"@jest/source-map@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-27.5.1.tgz#6608391e465add4205eae073b55e7f279e04e8cf" - integrity sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.9" - source-map "^0.6.0" - -"@jest/test-result@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-27.5.1.tgz#56a6585fa80f7cdab72b8c5fc2e871d03832f5bb" - integrity sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== - dependencies: - "@jest/console" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-result@^28.1.3": - version "28.1.3" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" - integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== - dependencies: - "@jest/console" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - 
-"@jest/test-sequencer@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-27.5.1.tgz#4057e0e9cea4439e544c6353c6affe58d095745b" - integrity sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== - dependencies: - "@jest/test-result" "^27.5.1" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-runtime "^27.5.1" - -"@jest/transform@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-27.5.1.tgz#6c3501dcc00c4c08915f292a600ece5ecfe1f409" - integrity sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== - dependencies: - "@babel/core" "^7.1.0" - "@jest/types" "^27.5.1" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-regex-util "^27.5.1" - jest-util "^27.5.1" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - source-map "^0.6.1" - write-file-atomic "^3.0.0" - -"@jest/types@^26.6.2": - version "26.6.2" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e" - integrity sha512-fC6QCp7Sc5sX6g8Tvbmj4XUTbyrik0akgRy03yjXbQaBWWNWGE7SGtJk98m0N8nzegD/7SggrUlivxo5ax4KWQ== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^15.0.0" - chalk "^4.0.0" - -"@jest/types@^27.5.1": - version "27.5.1" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-27.5.1.tgz#3c79ec4a8ba61c170bf937bcf9e98a9df175ec80" - integrity sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^16.0.0" - chalk "^4.0.0" - -"@jest/types@^28.1.3": - version "28.1.3" - resolved 
"https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" - integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== - dependencies: - "@jest/schemas" "^28.1.3" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" - integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" - integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== - -"@jridgewell/resolve-uri@^3.0.3": - version "3.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" - integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== - -"@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - -"@jridgewell/source-map@^0.3.3": - version "0.3.3" - resolved "https://registry.yarnpkg.com/@jridgewell/source-map/-/source-map-0.3.3.tgz#8108265659d4c33e72ffe14e33d6cc5eb59f2fda" - integrity 
sha512-b+fsZXeLYi9fEULmfBrhxn4IrPlINf8fiNarzTof004v3lFdntdwa9PF7vFJqm3mg7s+ScJMxXaE3Acp1irZcg== - dependencies: - "@jridgewell/gen-mapping" "^0.3.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - -"@jridgewell/trace-mapping@0.3.9": - version "0.3.9" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" - integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" - integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== - dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" - -"@leichtgewicht/ip-codec@^2.0.1": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" - integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== - -"@linaria/core@3.0.0-beta.13": - version "3.0.0-beta.13" - resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056" - integrity sha512-3zEi5plBCOsEzUneRVuQb+2SAx3qaC1dj0FfFAI6zIJQoDWu0dlSwKijMRack7oO9tUWrchfj3OkKQAd1LBdVg== +"@linaria/core@3.0.0-beta.13": + version "3.0.0-beta.13" 
+ resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056" + integrity sha512-3zEi5plBCOsEzUneRVuQb+2SAx3qaC1dj0FfFAI6zIJQoDWu0dlSwKijMRack7oO9tUWrchfj3OkKQAd1LBdVg== "@lingui/core@^3.14.0": version "3.15.0" @@ -2638,19 +1721,6 @@ refractor "^3.3.1" unist-util-visit "^2.0.3" -"@miragejs/graphql@^0.1.11": - version "0.1.12" - resolved "https://registry.npmjs.org/@miragejs/graphql/-/graphql-0.1.12.tgz#60679c4ad807fc4a001bc88aba396ba3fa5a958b" - integrity sha512-9FI7+ZWeIl7oqL//0hp72o1mXzcL4qphGUuqFaptX6Yrd/UPd1fNbmbgCGnFVZGCsNuEhNX0xgn0/YDdncvJDg== - dependencies: - graphql "^15.0.0" - miragejs "^0.1.0" - -"@miragejs/pretender-node-polyfill@^0.1.0": - version "0.1.2" - resolved "https://registry.npmjs.org/@miragejs/pretender-node-polyfill/-/pretender-node-polyfill-0.1.2.tgz#d26b6b7483fb70cd62189d05c95d2f67153e43f2" - integrity sha512-M/BexG/p05C5lFfMunxo/QcgIJnMT2vDVCd00wNqK2ImZONIlEETZwWJu1QtLxtmYlSHlCFl3JNzp0tLe7OJ5g== - "@monaco-editor/loader@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@monaco-editor/loader/-/loader-1.2.0.tgz#373fad69973384624e3d9b60eefd786461a76acd" @@ -2752,13 +1822,6 @@ prop-types "^15.8.1" react-is "^18.2.0" -"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1": - version "5.1.1-v1" - resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129" - integrity sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== - dependencies: - eslint-scope "5.1.1" - "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" @@ -2814,21 +1877,6 @@ tslib "^2.5.0" webcrypto-core "^1.7.7" -"@pmmmwh/react-refresh-webpack-plugin@^0.5.3": - version "0.5.10" - resolved 
"https://registry.yarnpkg.com/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.5.10.tgz#2eba163b8e7dbabb4ce3609ab5e32ab63dda3ef8" - integrity sha512-j0Ya0hCFZPd4x40qLzbhGsh9TMtdb+CJQiso+WxLOPNasohq9cc5SNUcwsZaRH6++Xh91Xkm/xHCkuIiIu0LUA== - dependencies: - ansi-html-community "^0.0.8" - common-path-prefix "^3.0.0" - core-js-pure "^3.23.3" - error-stack-parser "^2.0.6" - find-up "^5.0.0" - html-entities "^2.1.0" - loader-utils "^2.0.4" - schema-utils "^3.0.0" - source-map "^0.7.3" - "@popperjs/core@^2.11.6", "@popperjs/core@^2.9.2": version "2.11.6" resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.6.tgz#cee20bd55e68a1720bdab363ecf0c821ded4cd45" @@ -3820,124 +2868,29 @@ resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.5.tgz#b77571685410217a548a9c753aa3cdfc215bfc78" integrity sha512-l3YHBLAol6d/IKnB9LhpD0cEZWAoe3eFKUyTYWmFmCO2Q/WOckxLQAUyMZWwZV2M/m3+4vgRoaolFqaII82/TA== -"@rollup/plugin-babel@^5.2.0": - version "5.3.1" - resolved "https://registry.yarnpkg.com/@rollup/plugin-babel/-/plugin-babel-5.3.1.tgz#04bc0608f4aa4b2e4b1aebf284344d0f68fda283" - integrity sha512-WFfdLWU/xVWKeRQnKmIAQULUI7Il0gZnBIH/ZFO069wYIfPu+8zrfp/KMW0atmELoRDq8FbiP3VCss9MhCut7Q== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@rollup/pluginutils" "^3.1.0" - -"@rollup/plugin-node-resolve@^11.2.1": - version "11.2.1" - resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-11.2.1.tgz#82aa59397a29cd4e13248b106e6a4a1880362a60" - integrity sha512-yc2n43jcqVyGE2sqV5/YCmocy9ArjVAP/BeXyTtADTBBX6V0e5UMqwO8CdQ0kzjb6zu5P1qMzsScCMRvE9OlVg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - "@types/resolve" "1.17.1" - builtin-modules "^3.1.0" - deepmerge "^4.2.2" - is-module "^1.0.0" - resolve "^1.19.0" - -"@rollup/plugin-replace@^2.4.1": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-2.4.2.tgz#a2d539314fbc77c244858faa523012825068510a" - integrity 
sha512-IGcu+cydlUMZ5En85jxHH4qj2hta/11BHq95iHEyb2sbgiN0eCdzvUcHw5gt9pBL5lTi4JDYJ1acCoMGpTvEZg== - dependencies: - "@rollup/pluginutils" "^3.1.0" - magic-string "^0.25.7" - -"@rollup/pluginutils@^3.1.0": - version "3.1.0" - resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-3.1.0.tgz#706b4524ee6dc8b103b3c995533e5ad680c02b9b" - integrity sha512-GksZ6pr6TpIjHm8h9lSQ8pi8BE9VeubNT0OMJ3B5uZJ8pz73NPiqOtCog/x2/QzM1ENChPKxMDhiQuRHsqc+lg== +"@rollup/pluginutils@^5.0.4": + version "5.0.5" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.0.5.tgz#bbb4c175e19ebfeeb8c132c2eea0ecb89941a66c" + integrity sha512-6aEYR910NyP73oHiJglti74iRyOwgFU4x3meH/H8OJx6Ry0j6cOVZ5X/wTvub7G7Ao6qaHBEaNsV3GLJkSsF+Q== dependencies: - "@types/estree" "0.0.39" - estree-walker "^1.0.1" - picomatch "^2.2.2" + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^2.3.1" "@rooks/use-mutation-observer@4.11.2": version "4.11.2" resolved "https://registry.yarnpkg.com/@rooks/use-mutation-observer/-/use-mutation-observer-4.11.2.tgz#a0466c4338e0a4487ea19253c86bcd427c29f4af" integrity sha512-vpsdrZdr6TkB1zZJcHx+fR1YC/pHs2BaqcuYiEGjBVbwY5xcC49+h0hAUtQKHth3oJqXfIX/Ng8S7s5HFHdM/A== -"@rushstack/eslint-patch@^1.1.0": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.3.2.tgz#31b9c510d8cada9683549e1dbb4284cca5001faf" - integrity sha512-V+MvGwaHH03hYhY+k6Ef/xKd6RYlc4q8WBx+2ANmipHJcKuktNcI/NgEsJgdSUF6Lw32njT6OnrRsKYCdgHjYw== - "@seznam/compose-react-refs@^1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@seznam/compose-react-refs/-/compose-react-refs-1.0.6.tgz#6ec4e70bdd6e32f8e70b4100f27267cf306bd8df" integrity sha512-izzOXQfeQLonzrIQb8u6LQ8dk+ymz3WXTIXjvOlTXHq6sbzROg3NWU+9TTAOpEoK9Bth24/6F/XrfHJ5yR5n6Q== -"@sideway/address@^4.1.3": - version "4.1.4" - resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" - integrity 
sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== - dependencies: - "@hapi/hoek" "^9.0.0" - -"@sideway/formula@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" - integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== - -"@sideway/pinpoint@^2.0.0": - version "2.0.0" - resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz#cff8ffadc372ad29fd3f78277aeb29e632cc70df" - integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== - -"@sinclair/typebox@^0.24.1": - version "0.24.51" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" - integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== - -"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3": - version "1.8.3" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^7.0.4", "@sinonjs/fake-timers@^7.1.0": - version "7.1.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.1.0.tgz#8f13af27d842cbf51ad4502e05562fe9391d084e" - integrity sha512-hAEzXi6Wbvlb67NnGMGSNOeAflLVnMa4yliPU/ty1qjgW/vAletH15/v/esJwASSIA0YlIyjnloenFbEZc9q9A== - dependencies: - "@sinonjs/commons" "^1.7.0" - -"@sinonjs/fake-timers@^8.0.1": - version "8.1.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7" - integrity sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== - 
dependencies: - "@sinonjs/commons" "^1.7.0" - -"@sinonjs/samsam@^6.0.2": - version "6.0.2" - resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-6.0.2.tgz#a0117d823260f282c04bff5f8704bdc2ac6910bb" - integrity sha512-jxPRPp9n93ci7b8hMfJOFDPRLFYadN6FSpeROFTR4UNF4i5b+EK6m4QXPO46BDhFgRy1JuS87zAnFOzCUwMJcQ== - dependencies: - "@sinonjs/commons" "^1.6.0" - lodash.get "^4.4.2" - type-detect "^4.0.8" - -"@sinonjs/text-encoding@^0.7.1": - version "0.7.1" - resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5" - integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== - -"@surma/rollup-plugin-off-main-thread@^2.2.3": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-2.2.3.tgz#ee34985952ca21558ab0d952f00298ad2190c053" - integrity sha512-lR8q/9W7hZpMWweNiAKU7NQerBnzQQLvi8qnTDU/fxItPhtZVMbPV3lbCwjhIlNBe9Bbr5V+KHshvWmVSG9cxQ== - dependencies: - ejs "^3.1.6" - json5 "^2.2.0" - magic-string "^0.25.0" - string.prototype.matchall "^4.0.6" +"@sinclair/typebox@^0.27.8": + version "0.27.8" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== "@svgmoji/blob@^3.2.0": version "3.2.0" @@ -3983,189 +2936,145 @@ "@babel/runtime" "^7.12.5" "@svgmoji/core" "^3.2.0" -"@svgr/babel-plugin-add-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-5.4.0.tgz#81ef61947bb268eb9d50523446f9c638fb355906" - integrity sha512-ZFf2gs/8/6B8PnSofI0inYXr2SDNTDScPXhN7k5EqD4aZ3gi6u+rbmZHVB8IM3wDyx8ntKACZbtXSm7oZGRqVg== +"@svgr/babel-plugin-add-jsx-attribute@8.0.0": + version "8.0.0" + resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz#4001f5d5dd87fa13303e36ee106e3ff3a7eb8b22" + integrity sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g== -"@svgr/babel-plugin-remove-jsx-attribute@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-5.4.0.tgz#6b2c770c95c874654fd5e1d5ef475b78a0a962ef" - integrity sha512-yaS4o2PgUtwLFGTKbsiAy6D0o3ugcUhWK0Z45umJ66EPWunAz9fuFw2gJuje6wqQvQWOTJvIahUwndOXb7QCPg== +"@svgr/babel-plugin-remove-jsx-attribute@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186" + integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA== -"@svgr/babel-plugin-remove-jsx-empty-expression@^5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-5.0.1.tgz#25621a8915ed7ad70da6cea3d0a6dbc2ea933efd" - integrity sha512-LA72+88A11ND/yFIMzyuLRSMJ+tRKeYKeQ+mR3DcAZ5I4h5CPWN9AHyUzJbWSYp/u2u0xhmgOe0+E41+GjEueA== +"@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44" + integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA== -"@svgr/babel-plugin-replace-jsx-attribute-value@^5.0.1": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-5.0.1.tgz#0b221fc57f9fcd10e91fe219e2cd0dd03145a897" - integrity 
sha512-PoiE6ZD2Eiy5mK+fjHqwGOS+IXX0wq/YDtNyIgOrc6ejFnxN4b13pRpiIPbtPwHEc+NT2KCjteAcq33/F1Y9KQ== +"@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz#8fbb6b2e91fa26ac5d4aa25c6b6e4f20f9c0ae27" + integrity sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ== -"@svgr/babel-plugin-svg-dynamic-title@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-5.4.0.tgz#139b546dd0c3186b6e5db4fefc26cb0baea729d7" - integrity sha512-zSOZH8PdZOpuG1ZVx/cLVePB2ibo3WPpqo7gFIjLV9a0QsuQAzJiwwqmuEdTaW2pegyBE17Uu15mOgOcgabQZg== +"@svgr/babel-plugin-svg-dynamic-title@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz#1d5ba1d281363fc0f2f29a60d6d936f9bbc657b0" + integrity sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og== -"@svgr/babel-plugin-svg-em-dimensions@^5.4.0": - version "5.4.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-5.4.0.tgz#6543f69526632a133ce5cabab965deeaea2234a0" - integrity sha512-cPzDbDA5oT/sPXDCUYoVXEmm3VIoAWAPT6mSPTJNbQaBNUuEKVKyGH93oDY4e42PYHRW67N5alJx/eEol20abw== +"@svgr/babel-plugin-svg-em-dimensions@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz#35e08df300ea8b1d41cb8f62309c241b0369e501" + integrity sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g== -"@svgr/babel-plugin-transform-react-native-svg@^5.4.0": - version "5.4.0" - resolved 
"https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-5.4.0.tgz#00bf9a7a73f1cad3948cdab1f8dfb774750f8c80" - integrity sha512-3eYP/SaopZ41GHwXma7Rmxcv9uRslRDTY1estspeB1w1ueZWd/tPlMfEOoccYpEMZU3jD4OU7YitnXcF5hLW2Q== +"@svgr/babel-plugin-transform-react-native-svg@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz#90a8b63998b688b284f255c6a5248abd5b28d754" + integrity sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q== -"@svgr/babel-plugin-transform-svg-component@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-5.5.0.tgz#583a5e2a193e214da2f3afeb0b9e8d3250126b4a" - integrity sha512-q4jSH1UUvbrsOtlo/tKcgSeiCHRSBdXoIoqX1pgcKK/aU3JD27wmMKwGtpB8qRYUYoyXvfGxUVKchLuR5pB3rQ== +"@svgr/babel-plugin-transform-svg-component@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz#013b4bfca88779711f0ed2739f3f7efcefcf4f7e" + integrity sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw== -"@svgr/babel-preset@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-5.5.0.tgz#8af54f3e0a8add7b1e2b0fcd5a882c55393df327" - integrity sha512-4FiXBjvQ+z2j7yASeGPEi8VD/5rrGQk4Xrq3EdJmoZgz/tpqChpo5hgXDvmEauwtvOc52q8ghhZK4Oy7qph4ig== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-attribute" "^5.4.0" - "@svgr/babel-plugin-remove-jsx-empty-expression" "^5.0.1" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^5.0.1" - "@svgr/babel-plugin-svg-dynamic-title" "^5.4.0" - "@svgr/babel-plugin-svg-em-dimensions" "^5.4.0" - 
"@svgr/babel-plugin-transform-react-native-svg" "^5.4.0" - "@svgr/babel-plugin-transform-svg-component" "^5.5.0" - -"@svgr/core@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/core/-/core-5.5.0.tgz#82e826b8715d71083120fe8f2492ec7d7874a579" - integrity sha512-q52VOcsJPvV3jO1wkPtzTuKlvX7Y3xIcWRpCMtBF3MrteZJtBfQw/+u0B1BHy5ColpQc1/YVTrPEtSYIMNZlrQ== +"@svgr/babel-preset@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-8.1.0.tgz#0e87119aecdf1c424840b9d4565b7137cabf9ece" + integrity sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "8.0.0" + "@svgr/babel-plugin-remove-jsx-attribute" "8.0.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "8.0.0" + "@svgr/babel-plugin-replace-jsx-attribute-value" "8.0.0" + "@svgr/babel-plugin-svg-dynamic-title" "8.0.0" + "@svgr/babel-plugin-svg-em-dimensions" "8.0.0" + "@svgr/babel-plugin-transform-react-native-svg" "8.1.0" + "@svgr/babel-plugin-transform-svg-component" "8.0.0" + +"@svgr/core@^8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/core/-/core-8.1.0.tgz#41146f9b40b1a10beaf5cc4f361a16a3c1885e88" + integrity sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA== dependencies: - "@svgr/plugin-jsx" "^5.5.0" + "@babel/core" "^7.21.3" + "@svgr/babel-preset" "8.1.0" camelcase "^6.2.0" - cosmiconfig "^7.0.0" - -"@svgr/hast-util-to-babel-ast@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-5.5.0.tgz#5ee52a9c2533f73e63f8f22b779f93cd432a5461" - integrity sha512-cAaR/CAiZRB8GP32N+1jocovUtvlj0+e65TB50/6Lcime+EA49m/8l+P2ko+XPJ4dw3xaPS3jOL4F2X4KWxoeQ== - dependencies: - "@babel/types" "^7.12.6" + cosmiconfig "^8.1.3" + snake-case "^3.0.4" -"@svgr/plugin-jsx@^5.5.0": - version "5.5.0" - resolved 
"https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-5.5.0.tgz#1aa8cd798a1db7173ac043466d7b52236b369000" - integrity sha512-V/wVh33j12hGh05IDg8GpIUXbjAPnTdPTKuP4VNLggnwaHMPNQNae2pRnyTAILWCQdz5GyMqtO488g7CKM8CBA== +"@svgr/hast-util-to-babel-ast@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz#6952fd9ce0f470e1aded293b792a2705faf4ffd4" + integrity sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q== dependencies: - "@babel/core" "^7.12.3" - "@svgr/babel-preset" "^5.5.0" - "@svgr/hast-util-to-babel-ast" "^5.5.0" - svg-parser "^2.0.2" + "@babel/types" "^7.21.3" + entities "^4.4.0" -"@svgr/plugin-svgo@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-5.5.0.tgz#02da55d85320549324e201c7b2e53bf431fcc246" - integrity sha512-r5swKk46GuQl4RrVejVwpeeJaydoxkdwkM1mBKOgJLBUJPGaLci6ylg/IjhrRsREKDkr4kbMWdgOtbXEh0fyLQ== +"@svgr/plugin-jsx@^8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz#96969f04a24b58b174ee4cd974c60475acbd6928" + integrity sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA== dependencies: - cosmiconfig "^7.0.0" - deepmerge "^4.2.2" - svgo "^1.2.2" + "@babel/core" "^7.21.3" + "@svgr/babel-preset" "8.1.0" + "@svgr/hast-util-to-babel-ast" "8.0.0" + svg-parser "^2.0.4" -"@svgr/webpack@^5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-5.5.0.tgz#aae858ee579f5fa8ce6c3166ef56c6a1b381b640" - integrity sha512-DOBOK255wfQxguUta2INKkzPj6AIS6iafZYiYmHn6W3pHlycSRRlvWKCfLDG10fXfLWqE3DJHgRUOyJYmARa7g== - dependencies: - "@babel/core" "^7.12.3" - "@babel/plugin-transform-react-constant-elements" "^7.12.1" - "@babel/preset-env" "^7.12.1" - "@babel/preset-react" "^7.12.5" - "@svgr/core" "^5.5.0" - "@svgr/plugin-jsx" "^5.5.0" - "@svgr/plugin-svgo" "^5.5.0" - 
loader-utils "^2.0.0" - -"@testing-library/dom@^7.28.1": - version "7.31.0" - resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-7.31.0.tgz#938451abd3ca27e1b69bb395d4a40759fd7f5b3b" - integrity sha512-0X7ACg4YvTRDFMIuTOEj6B4NpN7i3F/4j5igOcTI5NC5J+N4TribNdErCHOZF1LBWhhcyfwxelVwvoYNMUXTOA== +"@testing-library/dom@^8.0.0": + version "8.20.1" + resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.20.1.tgz#2e52a32e46fc88369eef7eef634ac2a192decd9f" + integrity sha512-/DiOQ5xBxgdYRC8LNk7U+RWat0S3qRLeIw3ZIkMQ9kkVlRmwD/Eg8k8CqIpD6GW7u20JIUOfMKbxtiLutpjQ4g== dependencies: "@babel/code-frame" "^7.10.4" "@babel/runtime" "^7.12.5" - "@types/aria-query" "^4.2.0" - aria-query "^4.2.2" + "@types/aria-query" "^5.0.1" + aria-query "5.1.3" chalk "^4.1.0" - dom-accessibility-api "^0.5.4" - lz-string "^1.4.4" - pretty-format "^26.6.2" + dom-accessibility-api "^0.5.9" + lz-string "^1.5.0" + pretty-format "^27.0.2" -"@testing-library/jest-dom@^5.11.6": - version "5.12.0" - resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.12.0.tgz#6a5d340b092c44b7bce17a4791b47d9bc2c61443" - integrity sha512-N9Y82b2Z3j6wzIoAqajlKVF1Zt7sOH0pPee0sUHXHc5cv2Fdn23r+vpWm0MBBoGJtPOly5+Bdx1lnc3CD+A+ow== +"@testing-library/jest-dom@^6.1.4": + version "6.1.4" + resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-6.1.4.tgz#cf0835c33bc5ef00befb9e672b1e3e6a710e30e3" + integrity sha512-wpoYrCYwSZ5/AxcrjLxJmCU6I5QAJXslEeSiMQqaWmP2Kzpd1LvF/qxmAIW2qposULGWq2gw30GgVNFLSc2Jnw== dependencies: + "@adobe/css-tools" "^4.3.1" "@babel/runtime" "^7.9.2" - "@types/testing-library__jest-dom" "^5.9.1" - aria-query "^4.2.2" + aria-query "^5.0.0" chalk "^3.0.0" - css "^3.0.0" css.escape "^1.5.1" + dom-accessibility-api "^0.5.6" lodash "^4.17.15" redent "^3.0.0" -"@testing-library/react@^11.2.2": - version "11.2.7" - resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-11.2.7.tgz#b29e2e95c6765c815786c0bc1d5aed9cb2bf7818" - integrity 
sha512-tzRNp7pzd5QmbtXNG/mhdcl7Awfu/Iz1RaVHY75zTdOkmHCuzMhRL83gWHSgOAcjS3CCbyfwUHMZgRJb4kAfpA== +"@testing-library/react@^12.0.0": + version "12.1.5" + resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-12.1.5.tgz#bb248f72f02a5ac9d949dea07279095fa577963b" + integrity sha512-OfTXCJUFgjd/digLUuPxa0+/3ZxsQmE7ub9kcbW/wi96Bh3o/p5vrETcBGfP17NWPGqeYYl5LTRpwyGoMC4ysg== dependencies: "@babel/runtime" "^7.12.5" - "@testing-library/dom" "^7.28.1" + "@testing-library/dom" "^8.0.0" + "@types/react-dom" "<18.0.0" "@tommoor/remove-markdown@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@tommoor/remove-markdown/-/remove-markdown-0.3.2.tgz#5288ddd0e26b6b173e76ebb31c94653b0dcff45d" integrity sha512-awcc9hfLZqyyZHOGzAHbnjgZJpQGS1W1oZZ5GXOTTnbKVdKQ4OWYbrRWPUvXI2YAKJazrcS8rxPh67PX3rpGkQ== -"@tootallnate/once@1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - -"@trysound/sax@0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad" - integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== - -"@tsconfig/node10@^1.0.7": - version "1.0.9" - resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" - integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== - -"@tsconfig/node12@^1.0.7": - version "1.0.11" - resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" - integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== - -"@tsconfig/node14@^1.0.0": - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" - integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== - -"@tsconfig/node16@^1.0.2": - version "1.0.4" - resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.4.tgz#0b92dcc0cc1c81f6f306a381f28e31b1a56536e9" - integrity sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA== +"@tootallnate/once@2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" + integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== -"@types/aria-query@^4.2.0": - version "4.2.1" - resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-4.2.1.tgz#78b5433344e2f92e8b306c06a5622c50c245bf6b" - integrity sha512-S6oPal772qJZHoRZLFc/XoZW2gFvwXusYUmXPXkgxJLuEk2vOt7jc4Yo6z/vtI0EBkbPBVrJJ0B+prLIKiWqHg== +"@types/aria-query@^5.0.1": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.3.tgz#07570ebd25f9b516c910a91f7244052c9b58eabc" + integrity sha512-0Z6Tr7wjKJIk4OUEjVUQMtyunLDy339vcMaj38Kpj6jM2OE1p3S4kXExKZ7a3uXQAPCoy3sbrP1wibDKaf39oA== -"@types/babel__core@^7.0.0", "@types/babel__core@^7.1.14": - version "7.20.1" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.1.tgz#916ecea274b0c776fec721e333e55762d3a9614b" - integrity sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw== +"@types/babel__core@^7.1.18", "@types/babel__core@^7.20.3": + version "7.20.3" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.20.3.tgz#d5625a50b6f18244425a1359a858c73d70340778" + integrity sha512-54fjTSeSHwfan8AyHWrKbfBWiEUrNTZsUwPTDSNaaP1QDQIZbeNUg3a59E9D+375MzUw/x1vx2/0F5LBz+AeYA== dependencies: "@babel/parser" "^7.20.7" 
"@babel/types" "^7.20.7" @@ -4188,27 +3097,24 @@ "@babel/parser" "^7.1.0" "@babel/types" "^7.0.0" -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.4", "@types/babel__traverse@^7.0.6": +"@types/babel__traverse@*": version "7.11.1" resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.11.1.tgz#654f6c4f67568e24c23b367e947098c6206fa639" integrity sha512-Vs0hm0vPahPMYi9tDjtP66llufgO3ST16WXaSTtDGEl9cewAl3AibmxWw6TINOqHPT9z0uABKAYjT9jNSg4npw== dependencies: "@babel/types" "^7.3.0" -"@types/body-parser@*": - version "1.19.2" - resolved "https://registry.yarnpkg.com/@types/body-parser/-/body-parser-1.19.2.tgz#aea2059e28b7658639081347ac4fab3de166e6f0" - integrity sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g== +"@types/chai-subset@^1.3.3": + version "1.3.4" + resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.4.tgz#7938fa929dd12db451457e4d6faa27bcd599a729" + integrity sha512-CCWNXrJYSUIojZ1149ksLl3AN9cmZ5djf+yUoVVV+NuYrtydItQVlL2ZDqyC6M6O9LWRnVf8yYDxbXHO2TfQZg== dependencies: - "@types/connect" "*" - "@types/node" "*" + "@types/chai" "*" -"@types/bonjour@^3.5.9": - version "3.5.10" - resolved "https://registry.yarnpkg.com/@types/bonjour/-/bonjour-3.5.10.tgz#0f6aadfe00ea414edc86f5d106357cda9701e275" - integrity sha512-p7ienRMiS41Nu2/igbJxxLDWrSZ0WxM8UQgCeO9KhoVF7cOVFkrKsiDr1EsJIla8vV3oEEjGcz11jc5yimhzZw== - dependencies: - "@types/node" "*" +"@types/chai@*", "@types/chai@^4.3.5": + version "4.3.9" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.9.tgz#144d762491967db8c6dea38e03d2206c2623feec" + integrity sha512-69TtiDzu0bcmKQv3yg1Zx409/Kd7r0b5F1PfpYJfSHzLGtB53547V4u+9iqKYsTu/O2ai6KTb0TInNpvuQ3qmg== "@types/codemirror@^5.60.2": version "5.60.5" @@ -4217,21 +3123,6 @@ dependencies: "@types/tern" "*" -"@types/connect-history-api-fallback@^1.3.5": - version "1.5.0" - resolved 
"https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz#9fd20b3974bdc2bcd4ac6567e2e0f6885cb2cf41" - integrity sha512-4x5FkPpLipqwthjPsF7ZRbOv3uoLUFkTA9G9v583qi4pACvq0uTELrB8OLUzPWUI4IJIyvM85vzkV1nyiI2Lig== - dependencies: - "@types/express-serve-static-core" "*" - "@types/node" "*" - -"@types/connect@*": - version "3.4.35" - resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.35.tgz#5fcf6ae445e4021d1fc2219a4873cc73a3bb2ad1" - integrity sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ== - dependencies: - "@types/node" "*" - "@types/d3-array@3.0.3": version "3.0.3" resolved "https://registry.yarnpkg.com/@types/d3-array/-/d3-array-3.0.3.tgz#87d990bf504d14ad6b16766979d04e943c046dac" @@ -4310,59 +3201,11 @@ dependencies: "@types/trusted-types" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== - dependencies: - "@types/eslint" "*" - "@types/estree" "*" - -"@types/eslint@*", "@types/eslint@^7.29.0 || ^8.4.1": - version "8.40.2" - resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.40.2.tgz#2833bc112d809677864a4b0e7d1de4f04d7dac2d" - integrity sha512-PRVjQ4Eh9z9pmmtaq8nTjZjQwKFk7YIHIud3lRoKRBgUQjgjRmoGxxGEPXQkF+lH7QkHJRNr5F4aBgYCW0lqpQ== - dependencies: - "@types/estree" "*" - "@types/json-schema" "*" - "@types/estree@*", "@types/estree@^1.0.0": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== -"@types/estree@0.0.39": - version "0.0.39" - resolved 
"https://registry.yarnpkg.com/@types/estree/-/estree-0.0.39.tgz#e177e699ee1b8c22d23174caaa7422644389509f" - integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== - -"@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": - version "4.17.35" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.35.tgz#c95dd4424f0d32e525d23812aa8ab8e4d3906c4f" - integrity sha512-wALWQwrgiB2AWTT91CB62b6Yt0sNHpznUXeZEcnPU3DRdlDIz74x8Qg1UUYKSVFi+va5vKOLYRBI1bRKiLLKIg== - dependencies: - "@types/node" "*" - "@types/qs" "*" - "@types/range-parser" "*" - "@types/send" "*" - -"@types/express@*", "@types/express@^4.17.13": - version "4.17.17" - resolved "https://registry.yarnpkg.com/@types/express/-/express-4.17.17.tgz#01d5437f6ef9cfa8668e616e13c2f2ac9a491ae4" - integrity sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q== - dependencies: - "@types/body-parser" "*" - "@types/express-serve-static-core" "^4.17.33" - "@types/qs" "*" - "@types/serve-static" "*" - -"@types/graceful-fs@^4.1.2": - version "4.1.5" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== - dependencies: - "@types/node" "*" - "@types/graphql@^14.5.0": version "14.5.0" resolved "https://registry.yarnpkg.com/@types/graphql/-/graphql-14.5.0.tgz#a545fb3bc8013a3547cf2f07f5e13a33642b75d6" @@ -4390,50 +3233,6 @@ "@types/react" "*" hoist-non-react-statics "^3.3.0" -"@types/html-minifier-terser@^6.0.0": - version "6.1.0" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" - integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== - 
-"@types/http-errors@*": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65" - integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ== - -"@types/http-proxy@^1.17.5", "@types/http-proxy@^1.17.8": - version "1.17.11" - resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.11.tgz#0ca21949a5588d55ac2b659b69035c84bd5da293" - integrity sha512-HC8G7c1WmaF2ekqpnFq626xd3Zz0uvaqFmBJNRZCGEZCXkvSdJoNFn/8Ygbd9fKNQj8UzLdCETaI0UWPAjK7IA== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762" - integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.0.tgz#508b13aa344fa4976234e75dddcc34925737d821" - integrity sha512-nwKNbvnwJ2/mndE9ItP/zc2TCzw6uuodnF4EHYWD+gCQDVBuRQL5UzbZD0/ezy1iKsFU2ZQiDqg4M9dN4+wZgA== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@*": - version "26.0.23" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-26.0.23.tgz#a1b7eab3c503b80451d019efb588ec63522ee4e7" - integrity sha512-ZHLmWMJ9jJ9PTiT58juykZpL7KjwJywFN3Rr2pTSkyQfydf/rk22yS7W8p5DaVUMQ2BQC7oYiU3FjbTM/mYrOA== - dependencies: 
- jest-diff "^26.0.0" - pretty-format "^26.0.0" - "@types/js-cookie@^2.2.6": version "2.2.6" resolved "https://registry.yarnpkg.com/@types/js-cookie/-/js-cookie-2.2.6.tgz#f1a1cb35aff47bc5cfb05cb0c441ca91e914c26f" @@ -4444,7 +3243,12 @@ resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.1.tgz#5544730b65a480b18ace6b6ce914e519cec2d43b" integrity sha512-xdOvNmXmrZqqPy3kuCQ+fz6wA0xU5pji9cd1nDrflWaAWtYLLGk5ykW0H6yg5TVyehHP1pfmuuSaZkhP+kspVA== -"@types/json-schema@*", "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": +"@types/json-schema@^7.0.12": + version "7.0.15" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== + +"@types/json-schema@^7.0.9": version "7.0.12" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== @@ -4476,25 +3280,17 @@ dependencies: "@types/unist" "*" -"@types/mime@*": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-3.0.1.tgz#5f8f2bca0a5863cb69bc0b0acd88c96cb1d4ae10" - integrity sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA== - -"@types/mime@^1": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" - integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== - "@types/min-document@^2.19.0": version "2.19.0" resolved "https://registry.yarnpkg.com/@types/min-document/-/min-document-2.19.0.tgz#4f9919e789917c00de967a2c38fa8d234cbcd7d6" integrity 
sha512-lsYeSW1zfNqHTL1RuaOgfAhoiOWV1RAQDKT0BZ26z4Faz8llVIj1r1ablUo5QY6yzHMketuvu4+N0sv0eZpXTg== "@types/node@*": - version "15.6.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-15.6.1.tgz#32d43390d5c62c5b6ec486a9bc9c59544de39a08" - integrity sha512-7EIraBEyRHEe7CH+Fm1XvgqU6uwZN8Q7jppJGcqjROMT29qhAuuOxYB1uEY5UMYQKEmA5D+5tBnhdaPXSsLONA== + version "20.8.10" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.8.10.tgz#a5448b895c753ae929c26ce85cab557c6d4a365e" + integrity sha512-TlgT8JntpcbmKUFzjhsyhGfP2fsiz1Mv56im6enJ905xG1DAYesxJaeSbGqQmAw8OWPdhyJGhGSQGKRNJ45u9w== + dependencies: + undici-types "~5.26.4" "@types/node@^12.19.9": version "12.20.13" @@ -4521,11 +3317,6 @@ resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109" integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== -"@types/prettier@^2.1.5": - version "2.7.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" - integrity sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== - "@types/prismjs@*": version "1.26.0" resolved "https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.0.tgz#a1c3809b0ad61c62cac6d4e0c56d610c910b7654" @@ -4536,16 +3327,6 @@ resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== -"@types/q@^1.5.1": - version "1.5.4" - resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.4.tgz#15925414e0ad2cd765bfef58842f7e26a7accb24" - integrity sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug== - -"@types/qs@*": - version "6.9.7" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb" 
- integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw== - "@types/query-string@^6.3.0": version "6.3.0" resolved "https://registry.yarnpkg.com/@types/query-string/-/query-string-6.3.0.tgz#b6fa172a01405abcaedac681118e78429d62ea39" @@ -4558,11 +3339,6 @@ resolved "https://registry.yarnpkg.com/@types/querystringify/-/querystringify-2.0.0.tgz#d1eab3214ee2b57c3bd7eba0ab94b231028522fb" integrity sha512-9WgEGTevECrXJC2LSWPqiPYWq8BRmeaOyZn47js/3V6UF0PWtcVfvvR43YjeO8BzBsthTz98jMczujOwTw+WYg== -"@types/range-parser@*": - version "1.2.4" - resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc" - integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== - "@types/react-color@^3.0.6": version "3.0.6" resolved "https://registry.yarnpkg.com/@types/react-color/-/react-color-3.0.6.tgz#602fed023802b2424e7cd6ff3594ccd3d5055f9a" @@ -4578,6 +3354,13 @@ dependencies: "@types/react" "*" +"@types/react-dom@<18.0.0": + version "17.0.22" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.22.tgz#34317e08be27b33fa9e7cdb56125b22538261bad" + integrity sha512-wHt4gkdSMb4jPp1vc30MLJxoWGsZs88URfmt3FRXoOEYrrqK3I8IuZLE/uFBb4UT6MRfI0wXFu4DS7LS0kUC7Q== + dependencies: + "@types/react" "^17" + "@types/react-dom@^17.0.0": version "17.0.5" resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.5.tgz#df44eed5b8d9e0b13bb0cd38e0ea6572a1231227" @@ -4625,6 +3408,15 @@ "@types/scheduler" "*" csstype "^3.0.2" +"@types/react@^17": + version "17.0.69" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.69.tgz#245a0cf2f5b0fb1d645691d3083e3c7d4409b98f" + integrity sha512-klEeru//GhiQvXUBayz0Q4l3rKHWsBR/EUOhOeow6hK2jV7MlO44+8yEk6+OtPeOlRfnpUnrLXzGK+iGph5aeg== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + "@types/reactcss@*": version "1.2.6" resolved 
"https://registry.yarnpkg.com/@types/reactcss/-/reactcss-1.2.6.tgz#133c1e7e896f2726370d1d5a26bf06a30a038bcc" @@ -4639,18 +3431,6 @@ dependencies: "@types/prismjs" "*" -"@types/resolve@1.17.1": - version "1.17.1" - resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6" - integrity sha512-yy7HuzQhj0dhGpD8RLXSZWEkLsV9ibvxvi6EiJ3bkqLAO1RGo0WbkWQiwpRlSFymTJRz0d3k5LM3kkx8ArDbLw== - dependencies: - "@types/node" "*" - -"@types/retry@0.12.0": - version "0.12.0" - resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.0.tgz#2b35eccfcee7d38cd72ad99232fbd58bffb3c84d" - integrity sha512-wWKOClTTiizcZhXnPY4wikVAwmdYHp8q6DmC+EJUzAMsycb7HB32Kh9RN4+0gExjmPmZSAQjgURXIGATPegAvA== - "@types/scheduler@*": version "0.16.1" resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.1.tgz#18845205e86ff0038517aab7a18a62a6b9f71275" @@ -4661,41 +3441,10 @@ resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.0.tgz#591c1ce3a702c45ee15f47a42ade72c2fd78978a" integrity sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw== -"@types/send@*": - version "0.17.1" - resolved "https://registry.yarnpkg.com/@types/send/-/send-0.17.1.tgz#ed4932b8a2a805f1fe362a70f4e62d0ac994e301" - integrity sha512-Cwo8LE/0rnvX7kIIa3QHCkcuF21c05Ayb0ZfxPiv0W8VRiZiNW/WuRupHKpqqGVGf7SUA44QSOUKaEd9lIrd/Q== - dependencies: - "@types/mime" "^1" - "@types/node" "*" - -"@types/serve-index@^1.9.1": - version "1.9.1" - resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278" - integrity sha512-d/Hs3nWDxNL2xAczmOVZNj92YZCS6RGxfBPjKzuu/XirCgXdpKEb88dYNbrYGint6IVWLNP+yonwVAuRC0T2Dg== - dependencies: - "@types/express" "*" - -"@types/serve-static@*", "@types/serve-static@^1.13.10": - version "1.15.2" - resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.15.2.tgz#3e5419ecd1e40e7405d34093f10befb43f63381a" - 
integrity sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw== - dependencies: - "@types/http-errors" "*" - "@types/mime" "*" - "@types/node" "*" - -"@types/sockjs@^0.3.33": - version "0.3.33" - resolved "https://registry.yarnpkg.com/@types/sockjs/-/sockjs-0.3.33.tgz#570d3a0b99ac995360e3136fd6045113b1bd236f" - integrity sha512-f0KEEe05NvUnat+boPTZ0dgaLZ4SfSouXUgv5noUiefG2ajgKjmETo9ZJyuqsl7dfl2aHlLJUiki6B4ZYldiiw== - dependencies: - "@types/node" "*" - -"@types/stack-utils@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.0.tgz#7036640b4e21cc2f259ae826ce843d277dad8cff" - integrity sha512-RJJrrySY7A8havqpGObOB4W92QXKJo63/jFLLgpvOtsGUqbQZ9Sbgl35KMm1DjC6j7AvmmU2bIno+3IyEaemaw== +"@types/semver@^7.5.0": + version "7.5.6" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.6.tgz#c65b2bfce1bec346582c07724e3f8c1017a20339" + integrity sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A== "@types/string.prototype.matchall@^4.0.1": version "4.0.1" @@ -4718,19 +3467,12 @@ dependencies: "@types/estree" "*" -"@types/testing-library__jest-dom@^5.9.1": - version "5.9.5" - resolved "https://registry.yarnpkg.com/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.9.5.tgz#5bf25c91ad2d7b38f264b12275e5c92a66d849b0" - integrity sha512-ggn3ws+yRbOHog9GxnXiEZ/35Mow6YtPZpd7Z5mKDeZS/o7zx3yAle0ov/wjhVB5QT4N2Dt+GNoGCdqkBGCajQ== - dependencies: - "@types/jest" "*" - "@types/throttle-debounce@^2.1.0": version "2.1.0" resolved "https://registry.yarnpkg.com/@types/throttle-debounce/-/throttle-debounce-2.1.0.tgz#1c3df624bfc4b62f992d3012b84c56d41eab3776" integrity sha512-5eQEtSCoESnh2FsiLTxE121IiE60hnMqcb435fShf4bpLRjEu1Eoekht23y6zXS9Ts3l+Szu3TARnTsA0GkOkQ== -"@types/trusted-types@*", "@types/trusted-types@^2.0.2": +"@types/trusted-types@*": version "2.0.3" resolved 
"https://registry.yarnpkg.com/@types/trusted-types/-/trusted-types-2.0.3.tgz#a136f83b0758698df454e328759dbd3d44555311" integrity sha512-NfQ4gyz38SL8sDNrSixxU2Os1a5xcdFxipAFxYEuLUlvU2uDwS4NUpsImcf1//SlWItCVMMLiylsxbmNMToV/g== @@ -4752,45 +3494,12 @@ dependencies: "@types/node" "*" -"@types/ws@^8.5.5": - version "8.5.5" - resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb" - integrity sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg== - dependencies: - "@types/node" "*" - -"@types/yargs-parser@*": - version "20.2.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-20.2.0.tgz#dd3e6699ba3237f0348cd085e4698780204842f9" - integrity sha512-37RSHht+gzzgYeobbG+KWryeAW8J33Nhr69cjTqSYymXVZEN9NbRYWoYlRtDhHKPVT1FyNKwaTPC1NynKZpzRA== - -"@types/yargs@^15.0.0": - version "15.0.13" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.13.tgz#34f7fec8b389d7f3c1fd08026a5763e072d3c6dc" - integrity sha512-kQ5JNTrbDv3Rp5X2n/iUu37IJBDU2gsZ5R/g1/KHOOEc5IKfUFjXT6DENPGduh08I/pamwtEq4oul7gUqKTQDQ== - dependencies: - "@types/yargs-parser" "*" - -"@types/yargs@^16.0.0": - version "16.0.5" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-16.0.5.tgz#12cc86393985735a283e387936398c2f9e5f88e3" - integrity sha512-AxO/ADJOBFJScHbWhq2xAhlWP24rY4aCEG/NFaMvbT3X2MgRsLjhjQwsn0Zi5zn0LG9jUhCCZMeX9Dkuw6k+vQ== - dependencies: - "@types/yargs-parser" "*" - -"@types/yargs@^17.0.8": - version "17.0.24" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.24.tgz#b3ef8d50ad4aa6aecf6ddc97c580a00f5aa11902" - integrity sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw== - dependencies: - "@types/yargs-parser" "*" - "@types/zen-observable@^0.8.0": version "0.8.2" resolved "https://registry.yarnpkg.com/@types/zen-observable/-/zen-observable-0.8.2.tgz#808c9fa7e4517274ed555fa158f2de4b4f468e71" integrity 
sha512-HrCIVMLjE1MOozVoD86622S7aunluLb2PJdPfb3nYiEtohm8mIB/vyv0Fd37AdeMFrTUQXEunw78YloMA3Qilg== -"@typescript-eslint/eslint-plugin@^5.38.1", "@typescript-eslint/eslint-plugin@^5.5.0": +"@typescript-eslint/eslint-plugin@^5.38.1": version "5.60.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.60.1.tgz#81382d6ecb92b8dda70e91f9035611cb2fecd1c3" integrity sha512-KSWsVvsJsLJv3c4e73y/Bzt7OpqMCADUO846bHcuWYSYM19bldbAeDv7dYyV0jwkbMfJ2XdlzwjhXtuD7OY6bw== @@ -4806,14 +3515,7 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/experimental-utils@^5.0.0": - version "5.60.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-5.60.1.tgz#d783bb63b9183541019a945eda6a9d96b096d985" - integrity sha512-TXUdLxv2t8181nh5yLXl/Gr/zKj1ZofQ7m+ZdmG2+El0TYOHCvlZfc35D4nturemC3RUnf3KmLuFp3bVBjkG5w== - dependencies: - "@typescript-eslint/utils" "5.60.1" - -"@typescript-eslint/parser@^5.38.1", "@typescript-eslint/parser@^5.5.0": +"@typescript-eslint/parser@^5.38.1": version "5.60.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.60.1.tgz#0f2f58209c0862a73e3d5a56099abfdfa21d0fd3" integrity sha512-pHWlc3alg2oSMGwsU/Is8hbm3XFbcrb6P5wIxcQW9NsYBfnrubl/GhVVD/Jm/t8HXhA2WncoIRfBtnCgRGV96Q== @@ -4831,6 +3533,14 @@ "@typescript-eslint/types" "5.60.1" "@typescript-eslint/visitor-keys" "5.60.1" +"@typescript-eslint/scope-manager@6.14.0": + version "6.14.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz#53d24363fdb5ee0d1d8cda4ed5e5321272ab3d48" + integrity sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg== + dependencies: + "@typescript-eslint/types" "6.14.0" + "@typescript-eslint/visitor-keys" "6.14.0" + "@typescript-eslint/type-utils@5.60.1": version "5.60.1" resolved 
"https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.60.1.tgz#17770540e98d65ab4730c7aac618003f702893f4" @@ -4846,6 +3556,11 @@ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.60.1.tgz#a17473910f6b8d388ea83c9d7051af89c4eb7561" integrity sha512-zDcDx5fccU8BA0IDZc71bAtYIcG9PowaOwaD8rjYbqwK7dpe/UMQl3inJ4UtUK42nOCT41jTSCwg76E62JpMcg== +"@typescript-eslint/types@6.14.0": + version "6.14.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.14.0.tgz#935307f7a931016b7a5eb25d494ea3e1f613e929" + integrity sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA== + "@typescript-eslint/typescript-estree@5.60.1": version "5.60.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.60.1.tgz#8c71824b7165b64d5ebd7aa42968899525959834" @@ -4859,7 +3574,20 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/utils@5.60.1", "@typescript-eslint/utils@^5.58.0": +"@typescript-eslint/typescript-estree@6.14.0": + version "6.14.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz#90c7ddd45cd22139adf3d4577580d04c9189ac13" + integrity sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw== + dependencies: + "@typescript-eslint/types" "6.14.0" + "@typescript-eslint/visitor-keys" "6.14.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.5.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/utils@5.60.1": version "5.60.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.60.1.tgz#6861ebedbefba1ac85482d2bdef6f2ff1eb65b80" integrity sha512-tiJ7FFdFQOWssFa3gqb94Ilexyw0JVxj6vBzaSpfN/8IhoKkDuSAenUKvsSHw2A/TMpJb26izIszTXaqygkvpQ== @@ -4873,6 +3601,19 @@ eslint-scope "^5.1.1" semver "^7.3.7" +"@typescript-eslint/utils@^6.13.2": + version "6.14.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.14.0.tgz#856a9e274367d99ffbd39c48128b93a86c4261e3" + integrity sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg== + dependencies: + "@eslint-community/eslint-utils" "^4.4.0" + "@types/json-schema" "^7.0.12" + "@types/semver" "^7.5.0" + "@typescript-eslint/scope-manager" "6.14.0" + "@typescript-eslint/types" "6.14.0" + "@typescript-eslint/typescript-estree" "6.14.0" + semver "^7.5.4" + "@typescript-eslint/visitor-keys@5.60.1": version "5.60.1" resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.60.1.tgz#19a877358bf96318ec35d90bfe6bd1445cce9434" @@ -4881,6 +3622,14 @@ "@typescript-eslint/types" "5.60.1" eslint-visitor-keys "^3.3.0" +"@typescript-eslint/visitor-keys@6.14.0": + version "6.14.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz#1d1d486581819287de824a56c22f32543561138e" + integrity sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw== + dependencies: + "@typescript-eslint/types" "6.14.0" + eslint-visitor-keys "^3.4.1" + "@uiw/react-markdown-preview@3.0.6": version "3.0.6" resolved "https://registry.yarnpkg.com/@uiw/react-markdown-preview/-/react-markdown-preview-3.0.6.tgz#579900465f6d974c1a3e13c3a04db4113d0eb4db" @@ -5178,126 +3927,59 @@ "@visx/event" "3.0.1" prop-types "^15.6.2" -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" - integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== - dependencies: - "@webassemblyjs/helper-numbers" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - -"@webassemblyjs/floating-point-hex-parser@1.11.6": - version "1.11.6" - resolved 
"https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" - integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== - -"@webassemblyjs/helper-api-error@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" - integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== - -"@webassemblyjs/helper-buffer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" - integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== - -"@webassemblyjs/helper-numbers@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" - integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== - dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" - "@xtuc/long" "4.2.2" - -"@webassemblyjs/helper-wasm-bytecode@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" - integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== - -"@webassemblyjs/helper-wasm-section@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" - integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== - 
dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - -"@webassemblyjs/ieee754@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" - integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== - dependencies: - "@xtuc/ieee754" "^1.2.0" - -"@webassemblyjs/leb128@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" - integrity sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== - dependencies: - "@xtuc/long" "4.2.2" - -"@webassemblyjs/utf8@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" - integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== - -"@webassemblyjs/wasm-edit@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" - integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/helper-wasm-section" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-opt" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - "@webassemblyjs/wast-printer" "1.11.6" - -"@webassemblyjs/wasm-gen@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" - integrity 
sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wasm-opt@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" - integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" - integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wast-printer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" - integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@xtuc/long" "4.2.2" +"@vitejs/plugin-react@^4.1.1": + version "4.1.1" + resolved "https://registry.yarnpkg.com/@vitejs/plugin-react/-/plugin-react-4.1.1.tgz#a10254dc76778027407d01b6ddbca53b23852a72" + integrity 
sha512-Jie2HERK+uh27e+ORXXwEP5h0Y2lS9T2PRGbfebiHGlwzDO0dEnd2aNtOR/qjBlPb1YgxwAONeblL1xqLikLag== + dependencies: + "@babel/core" "^7.23.2" + "@babel/plugin-transform-react-jsx-self" "^7.22.5" + "@babel/plugin-transform-react-jsx-source" "^7.22.5" + "@types/babel__core" "^7.20.3" + react-refresh "^0.14.0" + +"@vitest/expect@0.34.6": + version "0.34.6" + resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-0.34.6.tgz#608a7b7a9aa3de0919db99b4cc087340a03ea77e" + integrity sha512-QUzKpUQRc1qC7qdGo7rMK3AkETI7w18gTCUrsNnyjjJKYiuUB9+TQK3QnR1unhCnWRC0AbKv2omLGQDF/mIjOw== + dependencies: + "@vitest/spy" "0.34.6" + "@vitest/utils" "0.34.6" + chai "^4.3.10" + +"@vitest/runner@0.34.6": + version "0.34.6" + resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-0.34.6.tgz#6f43ca241fc96b2edf230db58bcde5b974b8dcaf" + integrity sha512-1CUQgtJSLF47NnhN+F9X2ycxUP0kLHQ/JWvNHbeBfwW8CzEGgeskzNnHDyv1ieKTltuR6sdIHV+nmR6kPxQqzQ== + dependencies: + "@vitest/utils" "0.34.6" + p-limit "^4.0.0" + pathe "^1.1.1" + +"@vitest/snapshot@0.34.6": + version "0.34.6" + resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-0.34.6.tgz#b4528cf683b60a3e8071cacbcb97d18b9d5e1d8b" + integrity sha512-B3OZqYn6k4VaN011D+ve+AA4whM4QkcwcrwaKwAbyyvS/NB1hCWjFIBQxAQQSQir9/RtyAAGuq+4RJmbn2dH4w== + dependencies: + magic-string "^0.30.1" + pathe "^1.1.1" + pretty-format "^29.5.0" + +"@vitest/spy@0.34.6": + version "0.34.6" + resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-0.34.6.tgz#b5e8642a84aad12896c915bce9b3cc8cdaf821df" + integrity sha512-xaCvneSaeBw/cz8ySmF7ZwGvL0lBjfvqc1LpQ/vcdHEvpLn3Ff1vAvjw+CoGn0802l++5L/pxb7whwcWAw+DUQ== + dependencies: + tinyspy "^2.1.1" + +"@vitest/utils@0.34.6": + version "0.34.6" + resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-0.34.6.tgz#38a0a7eedddb8e7291af09a2409cb8a189516968" + integrity sha512-IG5aDD8S6zlvloDsnzHw0Ut5xczlF+kv2BOTo+iXfPr54Yhi5qbVOgGB1hZaVq4iJ4C/MZ2J0y15IlsV/ZcI0A== + dependencies: + diff-sequences "^29.4.3" + loupe 
"^2.3.6" + pretty-format "^29.5.0" "@whatwg-node/events@^0.0.3": version "0.0.3" @@ -5376,16 +4058,6 @@ resolved "https://registry.yarnpkg.com/@xobotyi/scrollbar-width/-/scrollbar-width-1.9.5.tgz#80224a6919272f405b87913ca13b92929bdf3c4d" integrity sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ== -"@xtuc/ieee754@^1.2.0": - version "1.2.0" - resolved "https://registry.yarnpkg.com/@xtuc/ieee754/-/ieee754-1.2.0.tgz#eef014a3145ae477a1cbc00cd1e552336dceb790" - integrity sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA== - -"@xtuc/long@4.2.2": - version "4.2.2" - resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" - integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== - a11y-status@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/a11y-status/-/a11y-status-2.0.0.tgz#7a4ee09a499c378b2690ab47a1354b736ee20b81" @@ -5395,70 +4067,31 @@ a11y-status@^2.0.0: "@types/throttle-debounce" "^2.1.0" throttle-debounce "^3.0.1" -abab@^2.0.3, abab@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" - integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== - -accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: - version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" - integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== - dependencies: - mime-types "~2.1.34" - negotiator "0.6.3" - -acorn-globals@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-6.0.0.tgz#46cdd39f0f8ff08a876619b55f5ac8a6dc770b45" - integrity 
sha512-ZQl7LOWaF5ePqqcX4hLuv/bLXYQNfNWw2c0/yX/TsPRKamzHcTGQnlCjHT3TsmkOUVEPS3crCxiPfdzE/Trlhg== - dependencies: - acorn "^7.1.1" - acorn-walk "^7.1.1" - -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== +abab@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" + integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== -acorn-walk@^7.1.1: - version "7.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" - integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== - -acorn-walk@^8.1.1: - version "8.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" - integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== +acorn-walk@^8.2.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.0.tgz#2097665af50fd0cf7a2dfccd2b9368964e66540f" + integrity sha512-FS7hV565M5l1R08MXqo8odwMTB02C2UqzB17RVgu9EyuYFBqJZ3/ZY97sQD5FewVu1UyDFc1yztUDrAwT0EypA== -acorn@^7.1.1: - version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== 
+acorn@^8.10.0, acorn@^8.9.0: + version "8.11.2" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b" + integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w== -acorn@^8.2.4, acorn@^8.4.1, acorn@^8.7.1, acorn@^8.8.0, acorn@^8.8.2: +acorn@^8.8.0: version "8.9.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.9.0.tgz#78a16e3b2bcc198c10822786fa6679e245db5b59" integrity sha512-jaVNAFBHNLXspO543WnNNPZFRtavh3skAkITqD0/2aeMkKZTN+254PyhwxFYrk3vQ1xfY+2wbesJMs/JC8/PwQ== -address@^1.0.1, address@^1.1.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/address/-/address-1.2.2.tgz#2b5248dac5485a6390532c6a517fda2e3faac89e" - integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== - -adjust-sourcemap-loader@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/adjust-sourcemap-loader/-/adjust-sourcemap-loader-4.0.0.tgz#fc4a0fd080f7d10471f30a7320f25560ade28c99" - integrity sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A== - dependencies: - loader-utils "^2.0.0" - regex-parser "^2.2.11" - agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -5481,26 +4114,7 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv-formats@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" - integrity sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA== - dependencies: - ajv "^8.0.0" - -ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: - version "3.5.2" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" - integrity 
sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== - -ajv-keywords@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-5.1.0.tgz#69d4d385a4733cdbeab44964a1170a88f87f0e16" - integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== - dependencies: - fast-deep-equal "^3.1.3" - -ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.10.0, ajv@^6.12.3, ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -5510,16 +4124,6 @@ ajv@^6.10.0, ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0, ajv@^8.6.0, ajv@^8.9.0: - version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" - integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - analytics-utils@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/analytics-utils/-/analytics-utils-1.0.12.tgz#07bd63471d238e80f42d557fba039365f09c50db" @@ -5536,19 +4140,14 @@ analytics@^0.8.9: "@analytics/core" "^0.12.7" "@analytics/storage-utils" "^0.4.2" -ansi-escapes@^4.2.1, ansi-escapes@^4.3.0, ansi-escapes@^4.3.1: +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" -ansi-html-community@^0.0.8: - version "0.0.8" - resolved 
"https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" - integrity sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw== - -ansi-regex@3.0.1, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: +ansi-regex@3.0.1, ansi-regex@^5.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== @@ -5621,12 +4220,7 @@ antd@4.24.7: rc-util "^5.22.5" scroll-into-view-if-needed "^2.2.25" -any-promise@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" - integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== - -anymatch@^3.0.3, anymatch@~3.1.2: +anymatch@~3.1.2: version "3.1.3" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== @@ -5634,16 +4228,6 @@ anymatch@^3.0.3, anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" -arg@^4.1.0: - version "4.1.3" - resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" - integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== - -arg@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" - integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== - argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -5656,45 +4240,27 @@ 
argparse@^2.0.1: resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -aria-query@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" - integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== +aria-query@5.1.3: + version "5.1.3" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.1.3.tgz#19db27cd101152773631396f7a95a3b58c22c35e" + integrity sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ== dependencies: - "@babel/runtime" "^7.10.2" - "@babel/runtime-corejs3" "^7.10.2" + deep-equal "^2.0.5" -aria-query@^5.1.3: +aria-query@^5.0.0, aria-query@^5.1.3: version "5.3.0" resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.0.tgz#650c569e41ad90b51b3d7df5e5eed1c7549c103e" integrity sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A== dependencies: dequal "^2.0.3" -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-flatten@1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - -array-flatten@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" - integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== +array-buffer-byte-length@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" + integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== + dependencies: + call-bind "^1.0.2" + is-array-buffer "^3.0.1" array-includes@^3.1.5, array-includes@^3.1.6: version "3.1.6" @@ -5717,11 +4283,6 @@ array-union@^2.1.0: resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - array.prototype.flat@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" @@ -5753,7 +4314,7 @@ array.prototype.tosorted@^1.1.1: es-shim-unscopables "^1.0.0" get-intrinsic "^1.1.3" -asap@~2.0.3, asap@~2.0.6: +asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= @@ -5779,10 +4340,10 @@ assert-plus@1.0.0, assert-plus@^1.0.0: resolved 
"https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= +assertion-error@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== ast-types-flow@^0.0.7: version "0.0.7" @@ -5809,32 +4370,15 @@ asynckit@^0.4.0: resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= -at-least-node@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" - integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== - -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - auto-bind@~4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/auto-bind/-/auto-bind-4.0.0.tgz#e3589fc6c2da8f7ca43ba9f84fa52a744fc997fb" integrity sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ== -autoprefixer@^10.4.12, autoprefixer@^10.4.13: - version "10.4.14" - resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.14.tgz#e28d49902f8e759dd25b153264e862df2705f79d" - integrity sha512-FQzyfOsTlwVzjHxKEqRIAdJx9niO6VCBCoEwax/VLSoQF29ggECcPuBqUMZ+u8jCZOPSy8b8/8KnuFbp0SaFZQ== - dependencies: - browserslist "^4.21.5" - 
caniuse-lite "^1.0.30001464" - fraction.js "^4.2.0" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" +available-typed-arrays@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== aws-sign2@~0.7.0: version "0.7.0" @@ -5851,15 +4395,6 @@ axe-core@^4.6.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== -axios@^1.6.1: - version "1.6.2" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.2.tgz#de67d42c755b571d3e698df1b6504cde9b0ee9f2" - integrity sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A== - dependencies: - follow-redirects "^1.15.0" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - axobject-query@^3.1.1: version "3.2.1" resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.2.1.tgz#39c378a6e3b06ca679f29138151e45b2b32da62a" @@ -5867,20 +4402,6 @@ axobject-query@^3.1.1: dependencies: dequal "^2.0.3" -babel-jest@^27.4.2, babel-jest@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-27.5.1.tgz#a1bf8d61928edfefd21da27eb86a695bfd691444" - integrity sha512-cdQ5dXjGRd0IBRATiQ4mZGlGlRE8kJpjPOixdNRdT+m3UcNqmYWN6rK6nvtXYfY3D76cb8s/O1Ss8ea24PIwcg== - dependencies: - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - babel-literal-to-ast@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/babel-literal-to-ast/-/babel-literal-to-ast-2.1.0.tgz#c8b12f9c36a8cee13572d65aabf6cff8adb1e8b3" @@ 
-5890,44 +4411,6 @@ babel-literal-to-ast@^2.1.0: "@babel/traverse" "^7.1.6" "@babel/types" "^7.1.6" -babel-loader@^8.2.3: - version "8.3.0" - resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.3.0.tgz#124936e841ba4fe8176786d6ff28add1f134d6a8" - integrity sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== - dependencies: - find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-import@1.13.5: - version "1.13.5" - resolved "https://registry.yarnpkg.com/babel-plugin-import/-/babel-plugin-import-1.13.5.tgz#42eed1c5afd9a35ee1b1f8fe922b07c44077d753" - integrity sha512-IkqnoV+ov1hdJVofly9pXRJmeDm9EtROfrc5i6eII0Hix2xMs5FEm8FG3ExMvazbnZBbgHIt6qdO8And6lCloQ== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.5.1.tgz#9be98ecf28c331eb9f5df9c72d6f89deb8181c2e" - integrity sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.0.0" - "@types/babel__traverse" "^7.0.6" - babel-plugin-macros@^2.5.0: version "2.8.0" resolved "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" @@ -5946,35 +4429,6 @@ babel-plugin-macros@^3.1.0: 
cosmiconfig "^7.0.0" resolve "^1.19.0" -babel-plugin-named-asset-import@^0.3.8: - version "0.3.8" - resolved "https://registry.yarnpkg.com/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz#6b7fa43c59229685368683c28bc9734f24524cc2" - integrity sha512-WXiAc++qo7XcJ1ZnTYGtLxmBCVbddAml3CEXgWaBzNzLNoxtQ8AiGEFDMOhot9XjTCQbvP5E77Fj9Gk924f00Q== - -babel-plugin-polyfill-corejs2@^0.4.3: - version "0.4.3" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.3.tgz#75044d90ba5043a5fb559ac98496f62f3eb668fd" - integrity sha512-bM3gHc337Dta490gg+/AseNB9L4YLHxq1nGKZZSHbhXv4aTYU2MD2cjza1Ru4S6975YLTaL1K8uJf6ukJhhmtw== - dependencies: - "@babel/compat-data" "^7.17.7" - "@babel/helper-define-polyfill-provider" "^0.4.0" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.8.1.tgz#39248263c38191f0d226f928d666e6db1b4b3a8a" - integrity sha512-ikFrZITKg1xH6pLND8zT14UPgjKHiGLqex7rGEZCH2EvhsneJaJPemmpQaIZV5AL03II+lXylw3UmddDK8RU5Q== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.0" - core-js-compat "^3.30.1" - -babel-plugin-polyfill-regenerator@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.5.0.tgz#e7344d88d9ef18a3c47ded99362ae4a757609380" - integrity sha512-hDJtKjMLVa7Z+LwnTCxoDLQj6wdc+B8dun7ayF2fYieI6OzfuvcLMB32ihJZ4UhCBwNYGl5bg/x/P9cMdnkc2g== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.4.0" - "babel-plugin-styled-components@>= 1.12.0": version "1.12.0" resolved "https://registry.yarnpkg.com/babel-plugin-styled-components/-/babel-plugin-styled-components-1.12.0.tgz#1dec1676512177de6b827211e9eda5a30db4f9b9" @@ -5995,29 +4449,6 @@ babel-plugin-syntax-trailing-function-commas@^7.0.0-beta.0: resolved 
"https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz#aa213c1435e2bffeb6fca842287ef534ad05d5cf" integrity sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ== -babel-plugin-transform-react-remove-prop-types@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz#f2edaf9b4c6a5fbe5c1d678bfb531078c1555f3a" - integrity sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA== - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - babel-preset-fbjs@^3.4.0: version "3.4.0" resolved "https://registry.yarnpkg.com/babel-preset-fbjs/-/babel-preset-fbjs-3.4.0.tgz#38a14e5a7a3b285a3f3a86552d650dca5cf6111c" @@ -6051,36 +4482,6 @@ babel-preset-fbjs@^3.4.0: "@babel/plugin-transform-template-literals" "^7.0.0" babel-plugin-syntax-trailing-function-commas "^7.0.0-beta.0" -babel-preset-jest@^27.5.1: - 
version "27.5.1" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-27.5.1.tgz#91f10f58034cb7989cb4f962b69fa6eef6a6bc81" - integrity sha512-Nptf2FzlPCWYuJg41HBqXVT8ym6bXOevuCTbhxlUpjwtysGaIWFvDEjp4y+G7fl13FgOdjs7P/DmErqH7da0Ag== - dependencies: - babel-plugin-jest-hoist "^27.5.1" - babel-preset-current-node-syntax "^1.0.0" - -babel-preset-react-app@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-react-app/-/babel-preset-react-app-10.0.1.tgz#ed6005a20a24f2c88521809fa9aea99903751584" - integrity sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== - dependencies: - "@babel/core" "^7.16.0" - "@babel/plugin-proposal-class-properties" "^7.16.0" - "@babel/plugin-proposal-decorators" "^7.16.4" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.16.0" - "@babel/plugin-proposal-numeric-separator" "^7.16.0" - "@babel/plugin-proposal-optional-chaining" "^7.16.0" - "@babel/plugin-proposal-private-methods" "^7.16.0" - "@babel/plugin-transform-flow-strip-types" "^7.16.0" - "@babel/plugin-transform-react-display-name" "^7.16.0" - "@babel/plugin-transform-runtime" "^7.16.4" - "@babel/preset-env" "^7.16.4" - "@babel/preset-react" "^7.16.0" - "@babel/preset-typescript" "^7.16.0" - "@babel/runtime" "^7.16.3" - babel-plugin-macros "^3.1.0" - babel-plugin-transform-react-remove-prop-types "^0.4.24" - bail@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/bail/-/bail-1.0.5.tgz#b6fa133404a392cbc1f8c4bf63f5953351e7a776" @@ -6101,24 +4502,6 @@ base64-js@^1.3.1: resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity 
sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - -batch@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" - integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= - bcrypt-pbkdf@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" @@ -6126,21 +4509,6 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" -bfj@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/bfj/-/bfj-7.0.2.tgz#1988ce76f3add9ac2913fd8ba47aad9e651bfbb2" - integrity sha512-+e/UqUzwmzJamNF50tBV6tZPTORow7gQ96iFow+8b562OdMpEK0BcJEq2OSPEDmAbSMBQ7PKZ87ubFkgxpYWgw== - dependencies: - bluebird "^3.5.5" - check-types "^11.1.1" - hoopy "^0.1.4" - tryer "^1.0.1" - -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" @@ -6160,39 +4528,6 @@ blacklist@^1.1.4: resolved "https://registry.yarnpkg.com/blacklist/-/blacklist-1.1.4.tgz#b2dd09d6177625b2caa69835a37b28995fa9a2f2" integrity sha512-DWdfwimA1WQxVC69Vs1Fy525NbYwisMSCdYQmW9zyzOByz9OB/tQwrKZ3T3pbTkuFjnkJFlJuyiDjPiXL5kzew== -bluebird@3.7.2, bluebird@^3.5.5: - version "3.7.2" - resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" - integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== - 
-body-parser@1.20.1: - version "1.20.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" - integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.11.0" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - -bonjour-service@^1.0.11: - version "1.1.1" - resolved "https://registry.yarnpkg.com/bonjour-service/-/bonjour-service-1.1.1.tgz#960948fa0e0153f5d26743ab15baf8e33752c135" - integrity sha512-Z/5lQRMOG9k7W+FkeGTNjh7htqn/2LMnfOvBZ8pynNZCM9MwkQkI3zeI4oz09uWdcgmgHugVvBqxGg4VQJ5PCg== - dependencies: - array-flatten "^2.1.2" - dns-equal "^1.0.0" - fast-deep-equal "^3.1.3" - multicast-dns "^7.2.5" - boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" @@ -6206,22 +4541,6 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" @@ -6229,12 +4548,7 @@ braces@^3.0.2, braces@~3.0.2: dependencies: fill-range "^7.0.1" -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^4.21.5: +browserslist@^4.21.3: version "4.21.9" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.9.tgz#e11bdd3c313d7e2a9e87e8b4b0c7872b13897635" integrity sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg== @@ -6245,13 +4559,13 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 update-browserslist-db "^1.0.11" browserslist@^4.21.9: - version "4.22.2" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" - integrity sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A== + version "4.22.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.1.tgz#ba91958d1a59b87dab6fed8dfbcb3da5e2e9c619" + integrity sha512-FEVc202+2iuClEhZhrWy6ZiAcRLvNMyYcxZ8raemul1DYVOVdFsbqckWLdsixQZCpJlwe77Z3UTalE7jsjnKfQ== dependencies: - caniuse-lite "^1.0.30001565" - electron-to-chromium "^1.4.601" - node-releases "^2.0.14" + caniuse-lite "^1.0.30001541" + electron-to-chromium "^1.4.535" + node-releases "^2.0.13" update-browserslist-db "^1.0.13" bser@2.1.1: @@ -6266,11 +4580,6 @@ btoa@^1.2.1: resolved "https://registry.yarnpkg.com/btoa/-/btoa-1.2.1.tgz#01a9909f8b2c93f6bf680ba26131eb30f7fa3d73" integrity sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g== -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity 
sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" @@ -6279,11 +4588,6 @@ buffer@^5.5.0: base64-js "^1.3.1" ieee754 "^1.1.13" -builtin-modules@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.2.0.tgz#45d5db99e7ee5e6bc4f362e008bf917ab5049887" - integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA== - busboy@^1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" @@ -6291,30 +4595,10 @@ busboy@^1.6.0: dependencies: streamsearch "^1.1.0" -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - integrity sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg= - -bytes@3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" - integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== - -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" +cac@^6.7.14: + version "6.7.14" + resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" + integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== call-bind@^1.0.0, 
call-bind@^1.0.2: version "1.0.2" @@ -6324,6 +4608,15 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" + integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== + dependencies: + function-bind "^1.1.2" + get-intrinsic "^1.2.1" + set-function-length "^1.1.1" + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -6337,17 +4630,12 @@ camel-case@4.1.2, camel-case@^4.1.1, camel-case@^4.1.2: pascal-case "^3.1.2" tslib "^2.0.3" -camelcase-css@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" - integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== - -camelcase@^5.0.0, camelcase@^5.3.1: +camelcase@^5.0.0: version "5.3.1" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== -camelcase@^6.2.0, camelcase@^6.2.1: +camelcase@^6.2.0: version "6.3.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== @@ -6357,25 +4645,15 @@ camelize@^1.0.0: resolved "https://registry.yarnpkg.com/camelize/-/camelize-1.0.0.tgz#164a5483e630fa4321e5af07020e531831b2609b" integrity sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs= -caniuse-api@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" - integrity 
sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== - dependencies: - browserslist "^4.0.0" - caniuse-lite "^1.0.0" - lodash.memoize "^4.1.2" - lodash.uniq "^4.5.0" - -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: +caniuse-lite@^1.0.30001503: version "1.0.30001508" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001508.tgz#4461bbc895c692a96da399639cc1e146e7302a33" integrity sha512-sdQZOJdmt3GJs1UMNpCCCyeuS2IEGLXnHyAo9yIO5JJDjbjoVRij4M1qep6P6gFpptD1PqIYgzM+gwJbOi92mw== -caniuse-lite@^1.0.30001565: - version "1.0.30001566" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz#61a8e17caf3752e3e426d4239c549ebbb37fef0d" - integrity sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA== +caniuse-lite@^1.0.30001541: + version "1.0.30001559" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001559.tgz#95a982440d3d314c471db68d02664fb7536c5a30" + integrity sha512-cPiMKZgqgkg5LY3/ntGeLFUpi6tzddBNS58A4tnTgQw1zON7u2sZMU7SzOeVH4tj20++9ggL+V6FDOFMTaFFYA== capital-case@^1.0.4: version "1.0.4" @@ -6391,11 +4669,6 @@ case-anything@^2.1.10: resolved "https://registry.yarnpkg.com/case-anything/-/case-anything-2.1.10.tgz#d18a6ca968d54ec3421df71e3e190f3bced23410" integrity sha512-JczJwVrCP0jPKh05McyVsuOg6AYosrB9XWZKbQzXeDAm2ClE/PJE/BcrrQrVyGYH7Jg8V/LDupmyL4kFlVsVFQ== -case-sensitive-paths-webpack-plugin@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" - integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== - caseless@~0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" @@ -6406,7 +4679,20 @@ ccount@^1.0.0: resolved 
"https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: +chai@^4.3.10: + version "4.3.10" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.10.tgz#d784cec635e3b7e2ffb66446a63b4e33bd390384" + integrity sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g== + dependencies: + assertion-error "^1.1.0" + check-error "^1.0.3" + deep-eql "^4.1.3" + get-func-name "^2.0.2" + loupe "^2.3.6" + pathval "^1.1.1" + type-detect "^4.0.8" + +chalk@^2.0.0, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -6423,7 +4709,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.1: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -6481,16 +4767,6 @@ change-case@^4.1.2: snake-case "^3.0.4" tslib "^2.0.3" -char-regex@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -char-regex@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-2.0.1.tgz#6dafdb25f9d3349914079f010ba8d0e6ff9cd01e" - integrity sha512-oSvEeo6ZUD7NepqAat3RqoucZ5SeqLJgOvVIwkafu6IP3V0pO38s/ypdVUmDDK6qIIHNlYHJAKX9E7R7HoKElw== - character-entities-html4@^1.0.0: 
version "1.1.4" resolved "https://registry.yarnpkg.com/character-entities-html4/-/character-entities-html4-1.1.4.tgz#0e64b0a3753ddbf1fdc044c5fd01d0199a02e125" @@ -6516,17 +4792,14 @@ chardet@^0.7.0: resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== -check-more-types@2.24.0: - version "2.24.0" - resolved "https://registry.npmjs.org/check-more-types/-/check-more-types-2.24.0.tgz#1420ffb10fd444dcfc79b43891bbfffd32a84600" - integrity sha1-FCD/sQ/URNz8ebQ4kbv//TKoRgA= - -check-types@^11.1.1: - version "11.1.2" - resolved "https://registry.yarnpkg.com/check-types/-/check-types-11.1.2.tgz#86a7c12bf5539f6324eb0e70ca8896c0e38f3e2f" - integrity sha512-tzWzvgePgLORb9/3a0YenggReLKAIb2owL03H2Xdoe5pKcUyWRSEQ8xfCar8t2SIAuEDwtmx2da1YB52YuHQMQ== +check-error@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.3.tgz#a6502e4312a7ee969f646e83bb3ddd56281bd694" + integrity sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg== + dependencies: + get-func-name "^2.0.2" -chokidar@^3.4.2, chokidar@^3.5.3: +chokidar@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -6541,31 +4814,6 @@ chokidar@^3.4.2, chokidar@^3.5.3: optionalDependencies: fsevents "~2.3.2" -chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== - -ci-info@^3.2.0: - version "3.8.0" - resolved 
"https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" - integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw== - -cjs-module-lexer@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" - integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== - -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - classnames@2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" @@ -6576,13 +4824,6 @@ classnames@2.x, classnames@^2.2.1, classnames@^2.2.3, classnames@^2.2.5, classna resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== -clean-css@^5.2.2: - version "5.3.2" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.2.tgz#70ecc7d4d4114921f5d298349ff86a31a9975224" - integrity sha512-JVJbM+f3d3Q704rF4bqQ5UUyTtuJ0JRKNbTKVEeujCCBoMdkEi+V+e8oktO9qGQNSvHrFTM6JZRXrUvGR1czww== - dependencies: - source-map "~0.6.0" - clean-stack@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" @@ -6640,15 +4881,6 @@ cliui@^8.0.1: strip-ansi "^6.0.1" wrap-ansi "^7.0.0" -clone-deep@^4.0.1: - version "4.0.1" - resolved 
"https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== - dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" - clone@^1.0.2: version "1.0.4" resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" @@ -6659,38 +4891,11 @@ clsx@^1.2.1: resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= - -coa@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" - integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== - dependencies: - "@types/q" "^1.5.1" - chalk "^2.4.1" - q "^1.1.2" - codemirror@^5.62.0: version "5.65.10" resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.65.10.tgz#4276a93b8534ce91f14b733ba9a1ac949666eac9" integrity sha512-IXAG5wlhbgcTJ6rZZcmi4+sjWIbJqIGfeg3tNa3yX84Jb3T4huS5qzQAo/cUisc1l3bI47WZodpyf7cYcocDKg== -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit 
"^1.0.0" - color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -6713,7 +4918,7 @@ color-hash@^2.0.1: color-name@1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" @@ -6725,12 +4930,7 @@ color2k@^2.0.0: resolved "https://registry.yarnpkg.com/color2k/-/color2k-2.0.0.tgz#86992c82e248c29f524023ed0822bc152c4fa670" integrity sha512-DWX9eXOC4fbJNiuvdH4QSHvvfLWyFo9TuFp7V9OzdsbPAdrWAuYc8qvFP2bIQ/LKh4LrAVnJ6vhiQYPvAHdtTg== -colord@^2.9.1: - version "2.9.3" - resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" - integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== - -colorette@^2.0.10, colorette@^2.0.16: +colorette@^2.0.16: version "2.0.20" resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== @@ -6747,32 +4947,7 @@ comma-separated-tokens@^1.0.0: resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea" integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== -commander@^2.20.0: - version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - -commander@^4.0.0: - version "4.1.1" - resolved 
"https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" - integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== - -commander@^7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" - integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== - -commander@^8.3.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - -common-path-prefix@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" - integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== - -common-tags@1.8.0, common-tags@^1.8.0: +common-tags@1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw== @@ -6782,36 +4957,6 @@ common-tags@1.8.2: resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= - -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity 
sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -compressible@~2.0.16: - version "2.0.18" - resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" - integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== - dependencies: - mime-db ">= 1.43.0 < 2" - -compression@^1.7.4: - version "1.7.4" - resolved "https://registry.yarnpkg.com/compression/-/compression-1.7.4.tgz#95523eff170ca57c29a0ca41e6fe131f41e5bb8f" - integrity sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ== - dependencies: - accepts "~1.3.5" - bytes "3.0.0" - compressible "~2.0.16" - debug "2.6.9" - on-headers "~1.0.2" - safe-buffer "5.1.2" - vary "~1.1.2" - compute-scroll-into-view@^1.0.17: version "1.0.17" resolved "https://registry.yarnpkg.com/compute-scroll-into-view/-/compute-scroll-into-view-1.0.17.tgz#6a88f18acd9d42e9cf4baa6bec7e0522607ab7ab" @@ -6822,16 +4967,11 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= -confusing-browser-globals@^1.0.10, confusing-browser-globals@^1.0.11: +confusing-browser-globals@^1.0.10: version "1.0.11" resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== -connect-history-api-fallback@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz#647264845251a0daf25b97ce87834cace0f5f1c8" - integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== - constant-case@^3.0.4: version "3.0.4" resolved 
"https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" @@ -6841,19 +4981,7 @@ constant-case@^3.0.4: tslib "^2.0.3" upper-case "^2.0.2" -content-disposition@0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" - integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== - dependencies: - safe-buffer "5.2.1" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.4.0, convert-source-map@^1.5.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: +convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== @@ -6863,27 +4991,12 @@ convert-source-map@^2.0.0: resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== - -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity 
sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== - copy-anything@^2.0.1: - version "2.0.3" - resolved "https://registry.yarnpkg.com/copy-anything/-/copy-anything-2.0.3.tgz#842407ba02466b0df844819bbe3baebbe5d45d87" - integrity sha512-GK6QUtisv4fNS+XcI7shX0Gx9ORg7QqIznyfho79JTnX1XhLiyZHfftvGiziqzRiEi/Bjhgpi+D2o7HxJFPnDQ== + version "2.0.6" + resolved "https://registry.yarnpkg.com/copy-anything/-/copy-anything-2.0.6.tgz#092454ea9584a7b7ad5573062b2a87f5900fc480" + integrity sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw== dependencies: - is-what "^3.12.0" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + is-what "^3.14.1" copy-to-clipboard@^3.2.0, copy-to-clipboard@^3.3.1: version "3.3.3" @@ -6892,48 +5005,11 @@ copy-to-clipboard@^3.2.0, copy-to-clipboard@^3.3.1: dependencies: toggle-selection "^1.0.6" -copy-webpack-plugin@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-11.0.0.tgz#96d4dbdb5f73d02dd72d0528d1958721ab72e04a" - integrity sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ== - dependencies: - fast-glob "^3.2.11" - glob-parent "^6.0.1" - globby "^13.1.1" - normalize-path "^3.0.0" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - -core-js-compat@^3.30.1, core-js-compat@^3.30.2: - version "3.31.0" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.31.0.tgz#4030847c0766cc0e803dcdfb30055d7ef2064bf1" - integrity sha512-hM7YCu1cU6Opx7MXNu0NuumM0ezNeAeRKadixyiQELWY3vT3De9S4J5ZBMraWV2vZnrE1Cirl0GtFtDtMUXzPw== - dependencies: - browserslist "^4.21.5" - -core-js-pure@^3.0.0, core-js-pure@^3.23.3: - version "3.31.0" - resolved 
"https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.31.0.tgz#052fd9e82fbaaf86457f5db1fadcd06f15966ff2" - integrity sha512-/AnE9Y4OsJZicCzIe97JP5XoPKQJfTuEG43aEVLFJGOJpyqELod+pE6LEl63DfG1Mp8wX97LDaDpy1GmLEUxlg== - -core-js@^3.19.2: - version "3.31.0" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.31.0.tgz#4471dd33e366c79d8c0977ed2d940821719db344" - integrity sha512-NIp2TQSGfR6ba5aalZD+ZQ1fSxGhDo/s1w0nx3RYzf2pnJxt7YynxFlFScP6eV7+GZsKO95NSjGxyJsU3DZgeQ== - -core-util-is@1.0.2, core-util-is@~1.0.0: +core-util-is@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cosmiconfig-typescript-loader@^1.0.0: - version "1.0.9" - resolved "https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz#69c523f7e8c3d9f27f563d02bbeadaf2f27212d3" - integrity sha512-tRuMRhxN4m1Y8hP9SNYfz7jRwt8lZdWxdjg/ohg5esKmsndJIn4yT96oJVcf5x0eA11taXl+sIp+ielu529k6g== - dependencies: - cosmiconfig "^7" - ts-node "^10.7.0" - cosmiconfig@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" @@ -6945,7 +5021,7 @@ cosmiconfig@^6.0.0: path-type "^4.0.0" yaml "^1.7.2" -cosmiconfig@^7, cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: +cosmiconfig@^7.0.0: version "7.1.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== @@ -6966,28 +5042,6 @@ cosmiconfig@^8.1.0, cosmiconfig@^8.1.3: parse-json "^5.2.0" path-type "^4.0.0" -craco-antd@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/craco-antd/-/craco-antd-2.0.0.tgz#f38977f4de1714e984ad4f68aae2bcce81bdab79" - integrity 
sha512-qrWBvsDM6ZmR5sBzEpB+rpyN3ZicewL4DTjAnKNR0YxzMzGrM6HqYgRgy+SplGtVvowFzo1a2PPmb8jfolWGrQ== - dependencies: - babel-plugin-import "1.13.5" - craco-less "2.0.0" - less-vars-to-js "1.3.0" - -craco-babel-loader@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/craco-babel-loader/-/craco-babel-loader-1.0.4.tgz#b887dff41f4ad1e8be84f90c1277af16475ada78" - integrity sha512-qoCsRWV/cMcucuIe+m09GRudCwAWtEhZl830MR2N0/weXBK+0VybsL19pUQtH0TwC33v9ll4myu/4TGBxnqZeA== - -craco-less@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/craco-less/-/craco-less-2.0.0.tgz#a2df18c32e97ebf00f62c3f2ea4cd97035f5f640" - integrity sha512-980mQaZVrC4ZsvOwvud6/AgvW7fLY3mW5m5+gR4sw4krxszgHb+qoRyOjqsYPD0F4oUmQoSiZSrlYY/bFGD9kQ== - dependencies: - less "^4.1.1" - less-loader "^7.3.0" - create-context-state@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/create-context-state/-/create-context-state-2.0.0.tgz#b58d33c553ef6c66958899d06fad4edf1b99ca24" @@ -7003,11 +5057,6 @@ create-react-class@^15.6.2: loose-envify "^1.3.1" object-assign "^4.1.1" -create-require@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" - integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== - cronstrue@^1.122.0: version "1.122.0" resolved "https://registry.yarnpkg.com/cronstrue/-/cronstrue-1.122.0.tgz#bd6838077b476d28f61d381398b47b8c3912a126" @@ -7027,7 +5076,7 @@ cross-inspect@1.0.0: dependencies: tslib "^2.4.0" -cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@^7.0.2: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -7036,35 +5085,11 @@ cross-spawn@^7.0.2, cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" 
-crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - -css-blank-pseudo@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/css-blank-pseudo/-/css-blank-pseudo-3.0.3.tgz#36523b01c12a25d812df343a32c322d2a2324561" - integrity sha512-VS90XWtsHGqoM0t4KpH053c4ehxZ2E6HtGI7x68YFV0pTo/QmkV/YFA+NnlvK8guxZVNWGQhVNJGC39Q8XF4OQ== - dependencies: - postcss-selector-parser "^6.0.9" - css-color-keywords@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/css-color-keywords/-/css-color-keywords-1.0.0.tgz#fea2616dc676b2962686b3af8dbdbe180b244e05" integrity sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU= -css-declaration-sorter@^6.3.1: - version "6.4.0" - resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-6.4.0.tgz#630618adc21724484b3e9505bce812def44000ad" - integrity sha512-jDfsatwWMWN0MODAFuHszfjphEXfNw9JUAhmY4pLu3TyTU+ohUpsbVtbU+1MZn4a47D9kqh03i4eyOm+74+zew== - -css-has-pseudo@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/css-has-pseudo/-/css-has-pseudo-3.0.4.tgz#57f6be91ca242d5c9020ee3e51bbb5b89fc7af73" - integrity sha512-Vse0xpR1K9MNlp2j5w1pgWIJtm1a8qS0JwS9goFYcImjlHEmywP9VUF05aGBXzGpDJF86QXk4L0ypBmwPhGArw== - dependencies: - postcss-selector-parser "^6.0.9" - css-in-js-utils@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/css-in-js-utils/-/css-in-js-utils-3.1.0.tgz#640ae6a33646d401fc720c54fc61c42cd76ae2bb" @@ -7072,63 +5097,6 @@ css-in-js-utils@^3.1.0: dependencies: hyphenate-style-name "^1.0.3" -css-loader@^6.5.1: - version "6.8.1" - resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.8.1.tgz#0f8f52699f60f5e679eab4ec0fcd68b8e8a50a88" - integrity sha512-xDAXtEVGlD0gJ07iclwWVkLoZOpEvAWaSyf6W18S2pOC//K8+qUDIx8IIT3D+HjnmkJPQeesOPv5aiUaJsCM2g== - 
dependencies: - icss-utils "^5.1.0" - postcss "^8.4.21" - postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.3" - postcss-modules-scope "^3.0.0" - postcss-modules-values "^4.0.0" - postcss-value-parser "^4.2.0" - semver "^7.3.8" - -css-minimizer-webpack-plugin@^3.2.0: - version "3.4.1" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.4.1.tgz#ab78f781ced9181992fe7b6e4f3422e76429878f" - integrity sha512-1u6D71zeIfgngN2XNRJefc/hY7Ybsxd74Jm4qngIXyUEk7fss3VUzuHxLAq/R8NAba4QU9OUSaMZlbpRc7bM4Q== - dependencies: - cssnano "^5.0.6" - jest-worker "^27.0.2" - postcss "^8.3.5" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - -css-prefers-color-scheme@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/css-prefers-color-scheme/-/css-prefers-color-scheme-6.0.3.tgz#ca8a22e5992c10a5b9d315155e7caee625903349" - integrity sha512-4BqMbZksRkJQx2zAjrokiGMd07RqOa2IxIrrN10lyBe9xhn9DEvjUK79J6jkeiv9D9hQFXKb6g1jwU62jziJZA== - -css-select-base-adapter@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" - integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== - -css-select@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" - integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== - dependencies: - boolbase "^1.0.0" - css-what "^3.2.1" - domutils "^1.7.0" - nth-check "^1.0.2" - -css-select@^4.1.3: - version "4.3.0" - resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b" - integrity sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ== - dependencies: - boolbase 
"^1.0.0" - css-what "^6.0.1" - domhandler "^4.3.1" - domutils "^2.8.0" - nth-check "^2.0.1" - css-to-react-native@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/css-to-react-native/-/css-to-react-native-3.0.0.tgz#62dbe678072a824a689bcfee011fc96e02a7d756" @@ -7138,15 +5106,7 @@ css-to-react-native@^3.0.0: css-color-keywords "^1.0.0" postcss-value-parser "^4.0.2" -css-tree@1.0.0-alpha.37: - version "1.0.0-alpha.37" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" - integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== - dependencies: - mdn-data "2.0.4" - source-map "^0.6.1" - -css-tree@^1.1.2, css-tree@^1.1.3: +css-tree@^1.1.2: version "1.1.3" resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== @@ -7154,112 +5114,17 @@ css-tree@^1.1.2, css-tree@^1.1.3: mdn-data "2.0.14" source-map "^0.6.1" -css-what@^3.2.1: - version "3.4.2" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" - integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== - -css-what@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/css-what/-/css-what-6.1.0.tgz#fb5effcf76f1ddea2c81bdfaa4de44e79bac70f4" - integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== - css.escape@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb" - integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s= + integrity sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg== -css@^3.0.0: +cssstyle@^3.0.0: version "3.0.0" - resolved 
"https://registry.yarnpkg.com/css/-/css-3.0.0.tgz#4447a4d58fdd03367c516ca9f64ae365cee4aa5d" - integrity sha512-DG9pFfwOrzc+hawpmqX/dHYHJG+Bsdb0klhyi1sDneOgGOXy9wQIC8hzyVp1e4NRYDBdxcylvywPkkXCHAzTyQ== + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-3.0.0.tgz#17ca9c87d26eac764bb8cfd00583cff21ce0277a" + integrity sha512-N4u2ABATi3Qplzf0hWbVCdjenim8F3ojEXpBDF5hBpjzW182MjNGLqfmQ0SkSPeQ+V86ZXgeH8aXj6kayd4jgg== dependencies: - inherits "^2.0.4" - source-map "^0.6.1" - source-map-resolve "^0.6.0" - -cssdb@^7.1.0: - version "7.6.0" - resolved "https://registry.yarnpkg.com/cssdb/-/cssdb-7.6.0.tgz#beac8f7a5f676db62d3c33da517ef4c9eb008f8b" - integrity sha512-Nna7rph8V0jC6+JBY4Vk4ndErUmfJfV6NJCaZdurL0omggabiy+QB2HCQtu5c/ACLZ0I7REv7A4QyPIoYzZx0w== - -cssesc@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" - integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== - -cssnano-preset-default@^5.2.14: - version "5.2.14" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-5.2.14.tgz#309def4f7b7e16d71ab2438052093330d9ab45d8" - integrity sha512-t0SFesj/ZV2OTylqQVOrFgEh5uanxbO6ZAdeCrNsUQ6fVuXwYTxJPNAGvGTxHbD68ldIJNec7PyYZDBrfDQ+6A== - dependencies: - css-declaration-sorter "^6.3.1" - cssnano-utils "^3.1.0" - postcss-calc "^8.2.3" - postcss-colormin "^5.3.1" - postcss-convert-values "^5.1.3" - postcss-discard-comments "^5.1.2" - postcss-discard-duplicates "^5.1.0" - postcss-discard-empty "^5.1.1" - postcss-discard-overridden "^5.1.0" - postcss-merge-longhand "^5.1.7" - postcss-merge-rules "^5.1.4" - postcss-minify-font-values "^5.1.0" - postcss-minify-gradients "^5.1.1" - postcss-minify-params "^5.1.4" - postcss-minify-selectors "^5.2.1" - postcss-normalize-charset "^5.1.0" - postcss-normalize-display-values "^5.1.0" - postcss-normalize-positions "^5.1.1" - postcss-normalize-repeat-style "^5.1.1" - 
postcss-normalize-string "^5.1.0" - postcss-normalize-timing-functions "^5.1.0" - postcss-normalize-unicode "^5.1.1" - postcss-normalize-url "^5.1.0" - postcss-normalize-whitespace "^5.1.1" - postcss-ordered-values "^5.1.3" - postcss-reduce-initial "^5.1.2" - postcss-reduce-transforms "^5.1.0" - postcss-svgo "^5.1.0" - postcss-unique-selectors "^5.1.1" - -cssnano-utils@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-3.1.0.tgz#95684d08c91511edfc70d2636338ca37ef3a6861" - integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== - -cssnano@^5.0.6: - version "5.1.15" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.1.15.tgz#ded66b5480d5127fcb44dac12ea5a983755136bf" - integrity sha512-j+BKgDcLDQA+eDifLx0EO4XSA56b7uut3BQFH+wbSaSTuGLuiyTa/wbRYthUXX8LC9mLg+WWKe8h+qJuwTAbHw== - dependencies: - cssnano-preset-default "^5.2.14" - lilconfig "^2.0.3" - yaml "^1.10.2" - -csso@^4.0.2, csso@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/csso/-/csso-4.2.0.tgz#ea3a561346e8dc9f546d6febedd50187cf389529" - integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== - dependencies: - css-tree "^1.1.2" - -cssom@^0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity 
sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" + rrweb-cssom "^0.6.0" csstype@^3.0.2, csstype@^3.0.6, csstype@^3.0.7, csstype@^3.1.0, csstype@^3.1.1: version "3.1.1" @@ -7378,14 +5243,14 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" -data-urls@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b" - integrity sha512-X5eWTSXO/BJmpdIKCRuKUgSCgAN0OwliVK3yPKbwIWU1Tdw5BRajxlzMidvh+gwko9AfQ9zIj52pzF91Q3YAvQ== +data-urls@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-4.0.0.tgz#333a454eca6f9a5b7b0f1013ff89074c3f522dd4" + integrity sha512-/mMTei/JXPqvFqQtfyTowxmJVwr2PVAeCcDxyFf6LhoOu/09TX2OX3kb2wzi4DMXcfj4OItwDOnhl5oziPnT6g== dependencies: - abab "^2.0.3" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.0.0" + abab "^2.0.6" + whatwg-mimetype "^3.0.0" + whatwg-url "^12.0.0" dataloader@^2.2.2: version "2.2.2" @@ -7407,28 +5272,14 @@ debounce@^1.2.0, debounce@^1.2.1: resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5" integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== -debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@^4.0.0, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity 
sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" -debug@4.1.1: - version "4.1.1" - resolved "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - -debug@^3.2.6, debug@^3.2.7: +debug@^3.2.7: version "3.2.7" resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== @@ -7440,22 +5291,48 @@ decamelize@^1.2.0: resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= -decimal.js@^10.2.1: - version "10.2.1" - resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.2.1.tgz#238ae7b0f0c793d3e3cea410108b35a2c01426a3" - integrity sha512-KaL7+6Fw6i5A2XSnsbhm/6B+NuEA7TZ4vqxnd5tXz9sbKtrN9Srj8ab4vKVdK8YAqZO9P1kg45Y6YLoduPf+kw== +decimal.js@^10.4.3: + version "10.4.3" + resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.4.3.tgz#1044092884d245d1b7f65725fa4ad4c6f781cc23" + integrity sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA== decode-uri-component@^0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== -dedent@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha1-JJXduvbrh0q7Dhvp3yLS5aVEMmw= +deep-eql@^4.1.3: + version "4.1.3" + resolved 
"https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.3.tgz#7c7775513092f7df98d8df9996dd085eb668cc6d" + integrity sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw== + dependencies: + type-detect "^4.0.0" + +deep-equal@^2.0.5: + version "2.2.2" + resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.2.tgz#9b2635da569a13ba8e1cc159c2f744071b115daa" + integrity sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA== + dependencies: + array-buffer-byte-length "^1.0.0" + call-bind "^1.0.2" + es-get-iterator "^1.1.3" + get-intrinsic "^1.2.1" + is-arguments "^1.1.1" + is-array-buffer "^3.0.2" + is-date-object "^1.0.5" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + isarray "^2.0.5" + object-is "^1.1.5" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.5.0" + side-channel "^1.0.4" + which-boxed-primitive "^1.0.2" + which-collection "^1.0.1" + which-typed-array "^1.1.9" -deep-is@^0.1.3, deep-is@~0.1.3: +deep-is@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= @@ -7465,13 +5342,6 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-gateway@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" - integrity sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== - dependencies: - execa "^5.0.0" - defaults@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" @@ -7479,10 +5349,14 @@ defaults@^1.0.3: dependencies: 
clone "^1.0.2" -define-lazy-prop@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" - integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== +define-data-property@^1.0.1, define-data-property@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" define-properties@^1.1.3, define-properties@^1.1.4: version "1.1.4" @@ -7492,43 +5366,20 @@ define-properties@^1.1.3, define-properties@^1.1.4: has-property-descriptors "^1.0.0" object-keys "^1.1.1" -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== +define-properties@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== 
dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" + define-data-property "^1.0.1" + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -depd@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= - dependency-graph@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" @@ -7539,58 +5390,20 @@ dequal@^2.0.3: resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" integrity sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA== -destroy@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" - integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== - detect-indent@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.0.0.tgz#0abd0f549f69fc6659a254fe96786186b6f528fd" integrity sha512-oSyFlqaTHCItVRGK5RmrmjB+CmaMOW7IaNA/kdxqhoa6d17j/5ce9O9eWXmV/KEdRwqpQA+Vqe8a8Bsybu4YnA== -detect-newline@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity 
sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - detect-node-es@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/detect-node-es/-/detect-node-es-1.1.0.tgz#163acdf643330caa0b4cd7c21e7ee7755d6fa493" integrity sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ== -detect-node@^2.0.4: - version "2.1.0" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== - -detect-port-alt@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" - integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== - dependencies: - address "^1.0.1" - debug "^2.6.0" - -didyoumean@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" - integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== - -diff-sequences@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-26.6.2.tgz#48ba99157de1923412eed41db6b6d4aa9ca7c0b1" - integrity sha512-Mv/TDa3nZ9sbc5soK+OoA74BsS3mL37yixCvUAQkiuA4Wz6YtwP/K47n2rv2ovzHZvoiQeA5FTQOschKkEwB0Q== - -diff-sequences@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-27.5.1.tgz#eaecc0d327fd68c8d9672a1e64ab8dccb2ef5327" - integrity sha512-k1gCAXAsNgLwEL+Y8Wvl+M6oEFj5bgazfZULpS5CneoPPXRaCCW7dm+q21Ky2VEE5X+VeRDBVg1Pcvvsr4TtNQ== - -diff@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" - integrity 
sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== +diff-sequences@^29.4.3: + version "29.6.3" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921" + integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q== diff@^5.0.0: version "5.0.0" @@ -7614,18 +5427,6 @@ dlv@^1.1.3: resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== -dns-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" - integrity sha1-s55/HabrCnW6nBcySzR1PEfgZU0= - -dns-packet@^5.2.2: - version "5.6.0" - resolved "https://registry.yarnpkg.com/dns-packet/-/dns-packet-5.6.0.tgz#2202c947845c7a63c23ece58f2f70ff6ab4c2f7d" - integrity sha512-rza3UH1LwdHh9qyPXp8lkwpjSNk/AMD3dPytUoRoqnypDUhY0xvbdmVhWOfxO68frEfV9BU8V12Ez7ZsHGZpCQ== - dependencies: - "@leichtgewicht/ip-codec" "^2.0.1" - doctrine@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" @@ -7640,23 +5441,16 @@ doctrine@^3.0.0: dependencies: esutils "^2.0.2" -dom-accessibility-api@^0.5.4: - version "0.5.4" - resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.4.tgz#b06d059cdd4a4ad9a79275f9d414a5c126241166" - integrity sha512-TvrjBckDy2c6v6RLxPv5QXOnU+SmF9nBII5621Ve5fu6Z/BDrENurBEvlC1f44lKEUVqOpK4w9E5Idc5/EgkLQ== +dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: + version "0.5.16" + resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453" + integrity 
sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg== dom-align@^1.7.0: version "1.12.2" resolved "https://registry.yarnpkg.com/dom-align/-/dom-align-1.12.2.tgz#0f8164ebd0c9c21b0c790310493cd855892acd4b" integrity sha512-pHuazgqrsTFrGU2WLDdXxCFabkdQDx72ddkraZNih1KsMcN5qsRSTR9O4VJRlwTPCPb5COYg3LOfiMHHcPInHg== -dom-converter@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.2.0.tgz#6721a9daee2e293682955b6afe416771627bb768" - integrity sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA== - dependencies: - utila "~0.4" - dom-helpers@^5.0.1: version "5.2.1" resolved "https://registry.yarnpkg.com/dom-helpers/-/dom-helpers-5.2.1.tgz#d9400536b2bf8225ad98fe052e029451ac40e902" @@ -7665,51 +5459,17 @@ dom-helpers@^5.0.1: "@babel/runtime" "^7.8.7" csstype "^3.0.2" -dom-serializer@0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - -dom-serializer@^1.0.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-1.4.1.tgz#de5d41b1aea290215dc45a6dae8adcf1d32e2d30" - integrity sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.2.0" - entities "^2.0.0" - dom-walk@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.2.tgz#0c548bef048f4d1f2a97249002236060daa3fd84" integrity sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w== -domelementtype@1: - version "1.3.1" - resolved 
"https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" - integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== - -domelementtype@^2.0.1, domelementtype@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" - integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== - -domexception@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/domexception/-/domexception-2.0.1.tgz#fb44aefba793e1574b0af6aed2801d057529f304" - integrity sha512-yxJ2mFy/sibVQlu5qHjOkf9J3K6zgmCxgJ94u2EdvDOV09H+32LtRswEcUsmUWN72pVLOEnTSRaIVVzVQgS0dg== - dependencies: - webidl-conversions "^5.0.0" - -domhandler@^4.0.0, domhandler@^4.2.0, domhandler@^4.3.1: - version "4.3.1" - resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-4.3.1.tgz#8d792033416f59d68bc03a5aa7b018c1ca89279c" - integrity sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ== +domexception@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-4.0.0.tgz#4ad1be56ccadc86fc76d033353999a8037d03673" + integrity sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw== dependencies: - domelementtype "^2.2.0" + webidl-conversions "^7.0.0" domino@^2.1.6: version "2.1.6" @@ -7721,23 +5481,6 @@ dompurify@^2.3.8: resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.3.8.tgz#224fe9ae57d7ebd9a1ae1ac18c1c1ca3f532226f" integrity sha512-eVhaWoVibIzqdGYjwsBWodIQIaXFSB+cKDf4cfxLMsK0xiud6SE+/WCVx/Xw/UwQsa4cS3T2eITcdtmTg2UKcw== -domutils@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" - integrity 
sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== - dependencies: - dom-serializer "0" - domelementtype "1" - -domutils@^2.5.2, domutils@^2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/domutils/-/domutils-2.8.0.tgz#4437def5db6e2d1f5d6ee859bd95ca7d02048135" - integrity sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A== - dependencies: - dom-serializer "^1.0.1" - domelementtype "^2.2.0" - domhandler "^4.2.0" - dot-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" @@ -7746,16 +5489,6 @@ dot-case@^3.0.4: no-case "^3.0.4" tslib "^2.0.3" -dotenv-expand@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" - integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== - -dotenv@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" - integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== - dotenv@^16.0.0: version "16.3.1" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" @@ -7771,7 +5504,7 @@ dset@^3.1.2: resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.3.tgz#c194147f159841148e8e34ca41f638556d9542d2" integrity sha512-20TuZZHCEZ2O71q9/+8BwKwZ0QtD9D8ObhrihJPr+vLLYlSuAU3/zL4cSlgbfeoGHTjCSJBa7NGcrF9/Bx/WJQ== -duplexer@^0.1.2, duplexer@~0.1.1: +duplexer@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== @@ -7784,12 +5517,7 @@ ecc-jsbn@~0.1.1: jsbn "~0.1.0" 
safer-buffer "^2.1.0" -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== - -ejs@^3.1.5, ejs@^3.1.6: +ejs@^3.1.5: version "3.1.9" resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== @@ -7801,20 +5529,10 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.441.tgz#94dd9c1cbf081d83f032a4f1cd9f787e21fc24ce" integrity sha512-LlCgQ8zgYZPymf5H4aE9itwiIWH4YlCiv1HFLmmcBeFYi5E+3eaIFnjHzYtcFQbaKfAW+CqZ9pgxo33DZuoqPg== -electron-to-chromium@^1.4.601: - version "1.4.601" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.601.tgz#cac69868548aee89961ffe63ff5a7716f0685b75" - integrity sha512-SpwUMDWe9tQu8JX5QCO1+p/hChAi9AE9UpoC3rcHVc+gdCGlbT3SGb5I1klgb952HRIyvt9wZhSz9bNBYz9swA== - -emittery@^0.10.2: - version "0.10.2" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" - integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== - -emittery@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.8.1.tgz#bb23cc86d03b30aa75a7f734819dee2e1ba70860" - integrity sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== +electron-to-chromium@^1.4.535: + version "1.4.576" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.576.tgz#0c6940fdc0d60f7e34bd742b29d8fa847c9294d1" + integrity sha512-yXsZyXJfAqzWk1WKryr0Wl0MN2D47xodPvEEwlVePBnhU5E7raevLQR+E6b9JAD3GfL/7MbAL9ZtWQQPcLx7wA== emoji-regex@^8.0.0: version "8.0.0" @@ 
-7851,28 +5569,10 @@ emojibase@^6.0.0: resolved "https://registry.yarnpkg.com/emojibase/-/emojibase-6.1.0.tgz#c3bc281e998a0e06398416090c23bac8c5ed3ee8" integrity sha512-1GkKJPXP6tVkYJHOBSJHoGOr/6uaDxZ9xJ6H7m6PfdGXTmQgbALHLWaVRY4Gi/qf5x/gT/NUXLPuSHYLqtLtrQ== -emojis-list@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" - integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== - -enhanced-resolve@^5.15.0: - version "5.15.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" - integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== - dependencies: - graceful-fs "^4.2.4" - tapable "^2.2.0" - -entities@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" - integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== +entities@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== errno@^0.1.1: version "0.1.8" @@ -7895,7 +5595,7 @@ error-stack-parser@^2.0.6: dependencies: stackframe "^1.1.1" -es-abstract@^1.17.2, es-abstract@^1.18.0-next.2, es-abstract@^1.19.0, es-abstract@^1.20.4: +es-abstract@^1.19.0, es-abstract@^1.20.4: version "1.20.4" resolved 
"https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== @@ -7925,10 +5625,20 @@ es-abstract@^1.17.2, es-abstract@^1.18.0-next.2, es-abstract@^1.19.0, es-abstrac string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" -es-module-lexer@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" - integrity sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA== +es-get-iterator@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6" + integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + has-symbols "^1.0.3" + is-arguments "^1.1.1" + is-map "^2.0.2" + is-set "^2.0.2" + is-string "^1.0.7" + isarray "^2.0.5" + stop-iteration-iterator "^1.0.0" es-shim-unscopables@^1.0.0: version "1.0.0" @@ -7946,12 +5656,40 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" +esbuild@^0.18.10: + version "0.18.20" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.18.20.tgz#4709f5a34801b43b799ab7d6d82f7284a9b7a7a6" + integrity sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA== + optionalDependencies: + "@esbuild/android-arm" "0.18.20" + "@esbuild/android-arm64" "0.18.20" + "@esbuild/android-x64" "0.18.20" + "@esbuild/darwin-arm64" "0.18.20" + "@esbuild/darwin-x64" "0.18.20" + "@esbuild/freebsd-arm64" "0.18.20" + "@esbuild/freebsd-x64" "0.18.20" + "@esbuild/linux-arm" "0.18.20" + "@esbuild/linux-arm64" "0.18.20" + "@esbuild/linux-ia32" "0.18.20" + "@esbuild/linux-loong64" "0.18.20" + "@esbuild/linux-mips64el" 
"0.18.20" + "@esbuild/linux-ppc64" "0.18.20" + "@esbuild/linux-riscv64" "0.18.20" + "@esbuild/linux-s390x" "0.18.20" + "@esbuild/linux-x64" "0.18.20" + "@esbuild/netbsd-x64" "0.18.20" + "@esbuild/openbsd-x64" "0.18.20" + "@esbuild/sunos-x64" "0.18.20" + "@esbuild/win32-arm64" "0.18.20" + "@esbuild/win32-ia32" "0.18.20" + "@esbuild/win32-x64" "0.18.20" + escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== -escape-html@^1.0.3, escape-html@~1.0.3: +escape-html@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== @@ -7961,28 +5699,11 @@ escape-string-regexp@^1.0.5: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== -escodegen@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-2.0.0.tgz#5e32b12833e8aa8fa35e1bf0befa89380484c7dd" - integrity sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw== - dependencies: - 
esprima "^4.0.1" - estraverse "^5.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - eslint-config-airbnb-base@^15.0.0: version "15.0.0" resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236" @@ -8014,26 +5735,6 @@ eslint-config-prettier@^8.8.0: resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz#bfda738d412adc917fd7b038857110efe98c9348" integrity sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA== -eslint-config-react-app@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4" - integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA== - dependencies: - "@babel/core" "^7.16.0" - "@babel/eslint-parser" "^7.16.3" - "@rushstack/eslint-patch" "^1.1.0" - "@typescript-eslint/eslint-plugin" "^5.5.0" - "@typescript-eslint/parser" "^5.5.0" - babel-preset-react-app "^10.0.1" - confusing-browser-globals "^1.0.11" - eslint-plugin-flowtype "^8.0.3" - eslint-plugin-import "^2.25.3" - eslint-plugin-jest "^25.3.0" - eslint-plugin-jsx-a11y "^6.5.1" - eslint-plugin-react "^7.27.1" - eslint-plugin-react-hooks "^4.3.0" - eslint-plugin-testing-library "^5.0.1" - eslint-import-resolver-node@^0.3.7: version "0.3.7" resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz#83b375187d412324a1963d84fa664377a23eb4d7" @@ -8050,14 +5751,6 @@ eslint-module-utils@^2.7.4: dependencies: debug "^3.2.7" -eslint-plugin-flowtype@^8.0.3: - version "8.0.3" - resolved "https://registry.yarnpkg.com/eslint-plugin-flowtype/-/eslint-plugin-flowtype-8.0.3.tgz#e1557e37118f24734aa3122e7536a038d34a4912" - integrity 
sha512-dX8l6qUL6O+fYPtpNRideCFSpmWOUVx5QcaGLVqe/vlDiBSe4vYljDWDETwnyFzpl7By/WVIu6rcrniCgH9BqQ== - dependencies: - lodash "^4.17.21" - string-natural-compare "^3.0.1" - eslint-plugin-import@^2.25.3: version "2.27.5" resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz#876a6d03f52608a3e5bb439c2550588e51dd6c65" @@ -8079,13 +5772,6 @@ eslint-plugin-import@^2.25.3: semver "^6.3.0" tsconfig-paths "^3.14.1" -eslint-plugin-jest@^25.3.0: - version "25.7.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-25.7.0.tgz#ff4ac97520b53a96187bad9c9814e7d00de09a6a" - integrity sha512-PWLUEXeeF7C9QGKqvdSbzLOiLTx+bno7/HC9eefePfEb257QFHg7ye3dh80AZVkaa/RQsBB1Q/ORQvg2X7F0NQ== - dependencies: - "@typescript-eslint/experimental-utils" "^5.0.0" - eslint-plugin-jsx-a11y@^6.5.1: version "6.7.1" resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz#fca5e02d115f48c9a597a6894d5bcec2f7a76976" @@ -8113,7 +5799,12 @@ eslint-plugin-react-hooks@^4.3.0: resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== -eslint-plugin-react@^7.27.1, eslint-plugin-react@^7.28.0: +eslint-plugin-react-refresh@^0.4.6: + version "0.4.6" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.6.tgz#e8e8accab681861baed00c5c12da70267db0936f" + integrity sha512-NjGXdm7zgcKRkKMua34qVO9doI7VOxZ6ancSvBELJSSoX97jyndXcSoa8XBh69JoB31dNz3EEzlMcizZl7LaMA== + +eslint-plugin-react@^7.28.0: version "7.32.2" resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz#e71f21c7c265ebce01bcbc9d0955170c55571f10" integrity sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg== @@ -8134,14 +5825,14 @@ 
eslint-plugin-react@^7.27.1, eslint-plugin-react@^7.28.0: semver "^6.3.0" string.prototype.matchall "^4.0.8" -eslint-plugin-testing-library@^5.0.1: - version "5.11.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-testing-library/-/eslint-plugin-testing-library-5.11.0.tgz#0bad7668e216e20dd12f8c3652ca353009163121" - integrity sha512-ELY7Gefo+61OfXKlQeXNIDVVLPcvKTeiQOoMZG9TeuWa7Ln4dUNRv8JdRWBQI9Mbb427XGlVB1aa1QPZxBJM8Q== +eslint-plugin-vitest@^0.3.17: + version "0.3.17" + resolved "https://registry.yarnpkg.com/eslint-plugin-vitest/-/eslint-plugin-vitest-0.3.17.tgz#bf68b74c9c9657839aff5e9c28fadca9ea8f6ed7" + integrity sha512-JzljEhaJ3YDNJc4n2VTlOdMhElwLsQQprVtgY+eoKQkearKiFP53Vw3515J3jb4ZM8TVnpk7UsIFXM0gbhz+vQ== dependencies: - "@typescript-eslint/utils" "^5.58.0" + "@typescript-eslint/utils" "^6.13.2" -eslint-scope@5.1.1, eslint-scope@^5.1.1: +eslint-scope@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== @@ -8157,28 +5848,12 @@ eslint-scope@^7.2.0: esrecurse "^4.3.0" estraverse "^5.2.0" -eslint-visitor-keys@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== - eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: version "3.4.1" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz#c22c48f48942d08ca824cc526211ae400478a994" integrity sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA== -eslint-webpack-plugin@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/eslint-webpack-plugin/-/eslint-webpack-plugin-3.2.0.tgz#1978cdb9edc461e4b0195a20da950cf57988347c" 
- integrity sha512-avrKcGncpPbPSUHX6B3stNGzkKFto3eL+DKM4+VyMrVnhPc3vRczVlCq3uhuFOdRvDHTVXuzwk1ZKUrqDQHQ9w== - dependencies: - "@types/eslint" "^7.29.0 || ^8.4.1" - jest-worker "^28.0.2" - micromatch "^4.0.5" - normalize-path "^3.0.0" - schema-utils "^4.0.0" - -eslint@^8.2.0, eslint@^8.3.0: +eslint@^8.2.0: version "8.43.0" resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.43.0.tgz#3e8c6066a57097adfd9d390b8fc93075f257a094" integrity sha512-aaCpf2JqqKesMFGgmRPessmVKjcGXqdlAYLLC3THM8t5nBRZRQ+st5WM/hoJXkdioEXLLbXgclUpM0TXo5HX5Q== @@ -8232,11 +5907,6 @@ espree@^9.5.2: acorn-jsx "^5.3.2" eslint-visitor-keys "^3.4.1" -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - esquery@^1.4.2: version "1.5.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" @@ -8261,139 +5931,16 @@ estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== -estree-walker@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-1.0.1.tgz#31bc5d612c96b704106b477e6dd5d8aa138cb700" - integrity sha512-1fMXF3YP4pZZVozF8j/ZLfvnR8NSIljt56UhbZ5PeeDmmGHpgpdwQt7ITlGvYaQukCvuBRMLEiKiYC+oeIg4cg== +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== esutils@^2.0.2: version "2.0.3" resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== - -event-stream@=3.3.4: - version "3.3.4" - resolved "https://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz#4ab4c9a0f5a54db9338b4c34d86bfce8f4b35571" - integrity sha1-SrTJoPWlTbkzi0w02Gv86PSzVXE= - dependencies: - duplexer "~0.1.1" - from "~0" - map-stream "~0.1.0" - pause-stream "0.0.11" - split "0.3" - stream-combiner "~0.0.4" - through "~2.3.1" - -eventemitter3@^4.0.0: - version "4.0.7" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" - integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== - -events@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== - -execa@5.1.1, execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity 
sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= - -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -expect@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/expect/-/expect-27.5.1.tgz#83ce59f1e5bdf5f9d2b94b61d2050db48f3fef74" - integrity sha512-E1q5hSUG2AmYQwQJ041nvgpkODHQvB+RKlB4IYdru6uJsyFTRyZAP463M+1lINorwbqAmUggi6+WwkD8lCS/Dw== - dependencies: - "@jest/types" "^27.5.1" - jest-get-type "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - -express@^4.17.3: - version "4.18.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" - integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== - dependencies: - accepts "~1.3.8" - array-flatten "1.1.1" - body-parser "1.20.1" - content-disposition "0.5.4" - content-type "~1.0.4" - cookie "0.5.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "2.0.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "1.2.0" - fresh "0.5.2" - http-errors "2.0.0" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "2.4.1" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.7" - qs "6.11.0" - range-parser "~1.2.1" - safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" - setprototypeof "1.2.0" - statuses "2.0.1" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable 
"^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" @@ -8408,20 +5955,6 @@ external-editor@^3.0.3: iconv-lite "^0.4.24" tmp "^0.0.33" -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - extract-domain@2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/extract-domain/-/extract-domain-2.2.1.tgz#1deeae633a5cbf05ae2fd7b3ff87cb98cbc4cb5b" @@ -8442,11 +5975,6 @@ extsprintf@^1.2.0: resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= -fake-xml-http-request@^2.1.1: - version "2.1.2" - resolved "https://registry.npmjs.org/fake-xml-http-request/-/fake-xml-http-request-2.1.2.tgz#f1786720cae50bbb46273035a0173414f3e85e74" - integrity sha512-HaFMBi7r+oEC9iJNpc3bvcW7Z7iLmM26hPDmlb0mFwyANSsOQAtJxbdWsXITKOzZUyMYK0zYCv3h5yDj9TsiXg== - faker@5.5.3: version "5.5.3" resolved "https://registry.npmjs.org/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e" @@ -8473,7 +6001,7 @@ fast-glob@^3.2.11: merge2 "^1.3.0" micromatch "^4.0.4" -fast-glob@^3.2.12, fast-glob@^3.2.9: +fast-glob@^3.2.9: version "3.2.12" resolved 
"https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== @@ -8484,12 +6012,12 @@ fast-glob@^3.2.12, fast-glob@^3.2.9: merge2 "^1.3.0" micromatch "^4.0.4" -fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: +fast-json-stable-stringify@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: +fast-levenshtein@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= @@ -8537,13 +6065,6 @@ fault@^1.0.0: dependencies: format "^0.2.0" -faye-websocket@^0.11.3: - version "0.11.4" - resolved "https://registry.yarnpkg.com/faye-websocket/-/faye-websocket-0.11.4.tgz#7f0d9275cfdd86a1c963dc8b65fcc451edcbb1da" - integrity sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g== - dependencies: - websocket-driver ">=0.5.1" - fb-watchman@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" @@ -8583,14 +6104,6 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" -file-loader@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d" - integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - filelist@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" @@ -8598,21 +6111,6 @@ filelist@^1.0.4: dependencies: minimatch "^5.0.1" -filesize@^8.0.6: - version "8.0.7" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-8.0.7.tgz#695e70d80f4e47012c132d57a059e80c6b580bd8" - integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -8625,41 +6123,12 @@ filter-obj@^1.1.0: resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b" integrity sha1-mzERErxsYSehbgFsbF1/GeCAXFs= -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "2.4.1" - parseurl "~1.3.3" - statuses "2.0.1" - unpipe "~1.0.0" - -find-cache-dir@^3.3.1: - version "3.3.1" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.1.tgz#89b33fad4a4670daa94f855f7fbe31d6d84fe880" - integrity sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - find-root@^1.1.0: version "1.1.0" resolved 
"https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0, find-up@^4.1.0: +find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== @@ -8675,23 +6144,6 @@ find-up@^5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" -find-webpack@2.2.1: - version "2.2.1" - resolved "https://registry.npmjs.org/find-webpack/-/find-webpack-2.2.1.tgz#96e7b701a2d37c3500cae30d4dc59e14923ba460" - integrity sha512-OdDtn2AzQvu3l9U1TS5ALc7uTVcLK/yv3fhjo+Pz7yuv4hG3ANKnbkKnPIPZ5ofd9mpYe6wRf5g5H4X9Lx48vQ== - dependencies: - debug "4.1.1" - find-yarn-workspace-root "1.2.1" - mocked-env "1.3.2" - -find-yarn-workspace-root@1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-1.2.1.tgz#40eb8e6e7c2502ddfaa2577c176f221422f860db" - integrity sha512-dVtfb0WuQG+8Ag2uWkbG79hOUzEsRrhBzgfn86g2sJPkzmcpGdghbNTfUKGTxymFrY/tLIodDzLoW9nOJ4FY8Q== - dependencies: - fs-extra "^4.0.3" - micromatch "^3.1.4" - flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" @@ -8717,54 +6169,18 @@ focus-outline-manager@^1.0.2: resolved "https://registry.yarnpkg.com/focus-outline-manager/-/focus-outline-manager-1.0.2.tgz#7bf3658865341fb6b08d042a037b9d2868b119b5" integrity 
sha512-bHWEmjLsTjGP9gVs7P3Hyl+oY5NlMW8aTSPdTJ+X2GKt6glDctt9fUCLbRV+d/l8NDC40+FxMjp9WlTQXaQALw== -follow-redirects@^1.0.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.0.tgz#06441868281c86d0dda4ad8bdaead2d02dca89d4" - integrity sha512-aExlJShTV4qOUOL7yF1U5tvLCB0xQuudbf6toyYA0E/acBNw71mvjFTnLaRp50aQaYocMR0a/RMMBIHeZnGyjQ== - -follow-redirects@^1.15.0: - version "1.15.3" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" - integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== - -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= +for-each@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= -fork-ts-checker-webpack-plugin@^6.5.0: - version "6.5.3" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz#eda2eff6e22476a2688d10661688c47f611b37f3" - integrity sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ== - dependencies: - "@babel/code-frame" "^7.8.3" - "@types/json-schema" "^7.0.5" - chalk "^4.1.0" - chokidar "^3.4.2" - cosmiconfig "^6.0.0" - deepmerge "^4.2.2" - fs-extra "^9.0.0" - glob "^7.1.6" - memfs "^3.1.2" - minimatch "^3.0.4" - schema-utils "2.7.0" - semver "^7.3.2" - tapable "^1.0.0" - -form-data@^3.0.0: - 
version "3.0.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" - integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.8" - mime-types "^2.1.12" - form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -8788,81 +6204,35 @@ format@^0.2.0: resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" integrity sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs= -forwarded@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" - integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== - -fraction.js@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= +fs-extra@^11.1.0: + version "11.1.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.1.1.tgz#da69f7c39f3b002378b0954bb6ae7efdc0876e2d" + integrity sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ== dependencies: - map-cache "^0.2.2" - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== - -from@~0: - version "0.1.7" - resolved 
"https://registry.npmjs.org/from/-/from-0.1.7.tgz#83c60afc58b9c56997007ed1a768b3ab303a44fe" - integrity sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4= - -fs-extra@^10.0.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" - integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== - dependencies: - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-extra@^4.0.3: - version "4.0.3" - resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" - integrity sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg== - dependencies: - graceful-fs "^4.1.2" - jsonfile "^4.0.0" - universalify "^0.1.0" - -fs-extra@^9.0.0, fs-extra@^9.0.1: - version "9.1.0" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" - integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== - dependencies: - at-least-node "^1.0.0" - graceful-fs "^4.2.0" - jsonfile "^6.0.1" - universalify "^2.0.0" - -fs-monkey@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.4.tgz#ee8c1b53d3fe8bb7e5d2c5c5dfc0168afdd2f747" - integrity sha512-INM/fWAxMICjttnD0DX1rBvinKskj5G1w+oy/pnm9u/tSlnBrzFonJMcalKJ30P8RRsPzKcCG7Q8l0jx5Fh9YQ== + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= -fsevents@^2.3.2, fsevents@~2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity 
sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== +fsevents@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== function-bind@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -8873,7 +6243,7 @@ function.prototype.name@^1.1.5: es-abstract "^1.19.0" functions-have-names "^1.2.2" -functions-have-names@^1.2.2: +functions-have-names@^1.2.2, functions-have-names@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== @@ -8893,6 +6263,11 @@ get-dom-document@^0.1.3: resolved "https://registry.yarnpkg.com/get-dom-document/-/get-dom-document-0.1.3.tgz#d0188090e43d38dd146c467ac6e3e1f2ace7af52" integrity sha512-bZ0O00gSQgMo+wz7gU6kbbWCPh4dfDsL9ZOmVhA8TOXszl5GV56TpTuW1/Qq/QctgpjK56yyvB1vBO+wzz8Szw== +get-func-name@^2.0.1, get-func-name@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41" 
+ integrity sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ== + get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" @@ -8902,20 +6277,15 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@ has "^1.0.3" has-symbols "^1.0.3" -get-own-enumerable-property-symbols@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" - integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^6.0.0: - version "6.0.1" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== +get-intrinsic@^1.2.0, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" + integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== + dependencies: + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" get-symbol-description@^1.0.0: version "1.0.0" @@ -8925,11 +6295,6 @@ get-symbol-description@^1.0.0: call-bind "^1.0.2" get-intrinsic "^1.1.1" -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved 
"https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -8944,31 +6309,14 @@ glob-parent@^5.1.2, glob-parent@~5.1.2: dependencies: is-glob "^4.0.1" -glob-parent@^6.0.1, glob-parent@^6.0.2: +glob-parent@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" -glob-to-regexp@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" - integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== - -glob@7.1.6: - version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: +glob@^7.0.5, glob@^7.1.1, glob@^7.1.3, glob@^7.1.6: version "7.2.0" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== @@ -8980,22 +6328,6 @@ glob@^7.0.5, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" -global-modules@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" - integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== - dependencies: - global-prefix "^3.0.0" - -global-prefix@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-3.0.0.tgz#fc85f73064df69f50421f47f883fe5b913ba9b97" - integrity sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg== - dependencies: - ini "^1.3.5" - kind-of "^6.0.2" - which "^1.3.1" - globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" @@ -9008,7 +6340,7 @@ globals@^13.19.0: dependencies: type-fest "^0.20.2" -globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: +globby@^11.0.3, globby@^11.1.0: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -9020,18 +6352,14 @@ globby@^11.0.3, globby@^11.0.4, globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" -globby@^13.1.1: - version "13.2.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.0.tgz#7dd5678d765c4680c2e6d106230d86cb727cb1af" - integrity sha512-jWsQfayf13NvqKUIL3Ta+CIqMnvlaIDFveWE/dpOZ9+3AMEJozsxDvKA02zync9UuvOM8rOXzsD5GqKP4OnWPQ== +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== dependencies: - dir-glob "^3.0.1" - fast-glob "^3.2.11" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^4.0.0" + get-intrinsic "^1.1.3" -graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: 
+graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -9098,7 +6426,7 @@ graphql.macro@^1.4.2: babel-plugin-macros "^2.5.0" graphql-tag "^2.10.1" -graphql@*, graphql@^15.0.0, graphql@^15.5.0: +graphql@*, graphql@^15.5.0: version "15.5.1" resolved "https://registry.npmjs.org/graphql/-/graphql-15.5.1.tgz#f2f84415d8985e7b84731e7f3536f8bb9d383aad" integrity sha512-FeTRX67T3LoE3LWAxxOlW2K3Bz+rMYAC18rRguK4wgXaTZMiJwSUwDmPFo3UadAKbzirKIg5Qy+sNJXbpPRnQw== @@ -9110,11 +6438,6 @@ gzip-size@^6.0.0: dependencies: duplexer "^0.1.2" -handle-thing@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" - integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== - har-schema@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" @@ -9128,11 +6451,6 @@ har-validator@~5.1.3: ajv "^6.12.3" har-schema "^2.0.0" -harmony-reflect@^1.4.6: - version "1.6.2" - resolved "https://registry.yarnpkg.com/harmony-reflect/-/harmony-reflect-1.6.2.tgz#31ecbd32e648a34d030d86adb67d4d47547fe710" - integrity sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== - has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -9155,7 +6473,12 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" -has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: +has-proto@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + +has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== @@ -9167,37 +6490,6 @@ has-tostringtag@^1.0.0: dependencies: has-symbols "^1.0.2" -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - has@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" @@ -9205,6 +6497,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +hasown@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" + integrity 
sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== + dependencies: + function-bind "^1.1.2" + hast-to-hyperscript@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" @@ -9305,11 +6604,6 @@ hastscript@^6.0.0: property-information "^5.0.0" space-separated-tokens "^1.0.0" -he@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" - integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== - header-case@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/header-case/-/header-case-2.0.4.tgz#5a42e63b55177349cf405beb8d775acabb92c063" @@ -9349,114 +6643,24 @@ hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.1.0, hoist-non-react- dependencies: react-is "^16.7.0" -hoopy@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d" - integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== - -hpack.js@^2.1.6: - version "2.1.6" - resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" - integrity sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI= - dependencies: - inherits "^2.0.1" - obuf "^1.0.0" - readable-stream "^2.0.1" - wbuf "^1.1.0" - -html-encoding-sniffer@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-2.0.1.tgz#42a6dc4fd33f00281176e8b23759ca4e4fa185f3" - integrity sha512-D5JbOMBIR/TVZkubHT+OyT2705QvogUW4IBn6nHd756OwieSF9aDYFj4dv6HHEVGYbHaLETa3WggZYWWMyy3ZQ== - dependencies: - whatwg-encoding "^1.0.5" - -html-entities@^2.1.0, html-entities@^2.3.2: - version "2.4.0" - resolved 
"https://registry.yarnpkg.com/html-entities/-/html-entities-2.4.0.tgz#edd0cee70402584c8c76cc2c0556db09d1f45061" - integrity sha512-igBTJcNNNhvZFRtm8uA6xMY6xYleeDwn3PeBCkDz7tHttv4F2hsDI2aPgNERWzvRcNYHNT3ymRaQzllmXj4YsQ== - -html-escaper@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -html-minifier-terser@^6.0.2: - version "6.1.0" - resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#bfc818934cc07918f6b3669f5774ecdfd48f32ab" - integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== +html-encoding-sniffer@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz#2cb1a8cf0db52414776e5b2a7a04d5dd98158de9" + integrity sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA== dependencies: - camel-case "^4.1.2" - clean-css "^5.2.2" - commander "^8.3.0" - he "^1.2.0" - param-case "^3.0.4" - relateurl "^0.2.7" - terser "^5.10.0" + whatwg-encoding "^2.0.0" html-void-elements@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== -html-webpack-plugin@^5.5.0: - version "5.5.3" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.3.tgz#72270f4a78e222b5825b296e5e3e1328ad525a3e" - integrity sha512-6YrDKTuqaP/TquFH7h4srYWsZx+x6k6+FbsTm0ziCwGHDP78Unr1r9F/H4+sGmMbX08GQcJ+K64x55b+7VM/jg== - dependencies: - "@types/html-minifier-terser" "^6.0.0" - html-minifier-terser "^6.0.2" - lodash "^4.17.21" - pretty-error "^4.0.0" - tapable "^2.0.0" - 
-htmlparser2@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7" - integrity sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A== - dependencies: - domelementtype "^2.0.1" - domhandler "^4.0.0" - domutils "^2.5.2" - entities "^2.0.0" - -http-deceiver@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" - integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= - -http-errors@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" - integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== - dependencies: - depd "2.0.0" - inherits "2.0.4" - setprototypeof "1.2.0" - statuses "2.0.1" - toidentifier "1.0.1" - -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - integrity sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0= - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-parser-js@>=0.5.1: - version "0.5.3" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9" - integrity sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg== - -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== +http-proxy-agent@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43" + integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w== dependencies: - "@tootallnate/once" "1" + "@tootallnate/once" "2" agent-base "6" debug "4" @@ -9468,37 +6672,6 @@ http-proxy-agent@^7.0.0: agent-base "^7.1.0" debug "^4.3.4" -http-proxy-middleware@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.0.tgz#20d1ac3409199c83e5d0383ba6436b04e7acb9fe" - integrity sha512-S+RN5njuyvYV760aiVKnyuTXqUMcSIvYOsHA891DOVQyrdZOwaXtBHpt9FUVPEDAsOvsPArZp6VXQLs44yvkow== - dependencies: - "@types/http-proxy" "^1.17.5" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy-middleware@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz#e1a4dd6979572c7ab5a4e4b55095d1f32a74963f" - integrity sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw== - dependencies: - "@types/http-proxy" "^1.17.8" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.18.1: - version "1.18.1" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" - integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== - dependencies: - eventemitter3 "^4.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" @@ -9508,10 +6681,10 @@ http-signature@~1.2.0: jsprim "^1.2.2" sshpk "^1.7.0" -https-proxy-agent@^5.0.0: - version "5.0.0" - resolved 
"https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== +https-proxy-agent@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== dependencies: agent-base "6" debug "4" @@ -9524,34 +6697,24 @@ https-proxy-agent@^7.0.0: agent-base "^7.0.2" debug "4" -human-signals@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - hyphenate-style-name@^1.0.3: version "1.0.4" resolved "https://registry.yarnpkg.com/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz#691879af8e220aea5750e8827db4ef62a54e361d" integrity sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ== -iconv-lite@0.4.24, iconv-lite@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -iconv-lite@^0.6.3: +iconv-lite@0.6.3, iconv-lite@^0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== dependencies: safer-buffer ">= 2.1.2 < 3.0.0" -icss-utils@^5.0.0, icss-utils@^5.1.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" - integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== +iconv-lite@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" idb-keyval@^5.0.2: version "5.1.5" @@ -9560,18 +6723,6 @@ idb-keyval@^5.0.2: dependencies: safari-14-idb-fix "^1.0.6" -idb@^7.0.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/idb/-/idb-7.1.1.tgz#d910ded866d32c7ced9befc5bfdf36f572ced72b" - integrity sha512-gchesWBzyvGHRO9W8tzUWFDycow5gwjvFKfyV9FF32Y7F50yZMp7mP+T2mJIWFx49zicqyC4uefHM17o6xKIVQ== - -identity-obj-proxy@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz#94d2bda96084453ef36fbc5aaec37e0f79f1fc14" - integrity sha512-00n6YnVHKrinT9t0d9+5yZC6UBNJANpYEQvL2LlX6Ab9lnmxzIRcEmTPuyGScvl1+jKuCICX1Z0Ab1pPKKdikA== - dependencies: - harmony-reflect "^1.4.6" - ieee754@^1.1.13: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" @@ -9585,12 +6736,7 @@ ignore@^5.2.0: image-size@~0.5.0: version "0.5.5" resolved "https://registry.yarnpkg.com/image-size/-/image-size-0.5.5.tgz#09dfd4ab9d20e29eb1c3e80b8990378df9e3cb9c" - integrity sha1-Cd/Uq50g4p6xw+gLiZA3jfnjy5w= - -immer@^9.0.7: - version "9.0.21" - resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176" - integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== + integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ== immutable@~3.7.6: version "3.7.6" @@ 
-9610,14 +6756,6 @@ import-from@4.0.0: resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" integrity sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ== -import-local@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.2.tgz#a8cfd0431d1de4a2199703d003e3e62364fa6db6" - integrity sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" @@ -9628,11 +6766,6 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -inflected@^2.0.4: - version "2.1.0" - resolved "https://registry.npmjs.org/inflected/-/inflected-2.1.0.tgz#2816ac17a570bbbc8303ca05bca8bf9b3f959687" - integrity sha512-hAEKNxvHf2Iq3H60oMBHkB4wl5jn3TPF3+fXek/sRwAB5gP9xWs4r7aweSF95f99HFoz69pnZTcu8f0SIHV18w== - inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" @@ -9641,21 +6774,11 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: +inherits@2, inherits@^2.0.3, inherits@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== -inherits@2.0.3: - version "2.0.3" - resolved 
"https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -ini@^1.3.5: - version "1.3.8" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" - integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== - inline-style-parser@0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" @@ -9699,6 +6822,15 @@ internal-slot@^1.0.3: has "^1.0.3" side-channel "^1.0.4" +internal-slot@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.6.tgz#37e756098c4911c5e912b8edbf71ed3aa116f930" + integrity sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg== + dependencies: + get-intrinsic "^1.2.2" + hasown "^2.0.0" + side-channel "^1.0.4" + "internmap@1 - 2", internmap@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" @@ -9711,16 +6843,6 @@ invariant@^2.2.4: dependencies: loose-envify "^1.0.0" -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -ipaddr.js@^2.0.1: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" - integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== - is-absolute@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" @@ -9729,20 +6851,6 @@ is-absolute@^1.0.0: is-relative "^1.0.0" is-windows 
"^1.0.1" -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - is-alphabetical@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" @@ -9756,6 +6864,23 @@ is-alphanumerical@^1.0.0: is-alphabetical "^1.0.0" is-decimal "^1.0.0" +is-arguments@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" + integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.2.0" + is-typed-array "^1.1.10" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -9780,17 +6905,12 @@ is-boolean-object@^1.1.0: dependencies: call-bind "^1.0.2" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - 
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-buffer@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== -is-callable@^1.1.4, is-callable@^1.2.7: +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: version "1.2.7" resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== @@ -9802,59 +6922,29 @@ is-core-module@^2.11.0, is-core-module@^2.9.0: dependencies: has "^1.0.3" -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - is-date-object@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.4.tgz#550cfcc03afada05eea3dd30981c7b09551f73e5" integrity sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A== +is-date-object@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + is-decimal@^1.0.0: 
version "1.0.4" resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-docker@^2.0.0, is-docker@^2.1.1: +is-docker@^2.0.0: version "2.2.1" resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.0, is-extendable@^1.0.1: +is-extendable@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== @@ -9883,11 +6973,6 @@ is-fullwidth-code-point@^3.0.0: resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - is-glob@4.0.3, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" @@ -9919,10 +7004,10 @@ is-lower-case@^2.0.2: dependencies: tslib "^2.0.3" -is-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" - integrity sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE= +is-map@^2.0.1, is-map@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" + integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== is-negative-zero@^2.0.2: version "2.0.2" @@ -9934,23 +7019,11 @@ is-number-object@^1.0.4: resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.5.tgz#6edfaeed7950cff19afedce9fbfca9ee6dd289eb" integrity sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw== -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity 
sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-obj@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - integrity sha1-PkcprB9f3gJc19g6iW2rn09n2w8= - is-path-inside@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -9961,12 +7034,7 @@ is-plain-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== -is-plain-obj@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - -is-plain-object@^2.0.3, is-plain-object@^2.0.4: +is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== @@ -9986,11 +7054,6 @@ is-regex@^1.1.4: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-regexp@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069" - integrity sha1-/S2INUXEa6xaYz57mgnof6LLUGk= - is-relative@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" @@ -9998,10 +7061,10 @@ is-relative@^1.0.0: dependencies: is-unc-path "^1.0.0" -is-root@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" - integrity 
sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== +is-set@^2.0.1, is-set@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" + integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== is-shared-array-buffer@^1.0.2: version "1.0.2" @@ -10010,11 +7073,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" - integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== - is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" @@ -10029,7 +7087,14 @@ is-symbol@^1.0.2, is-symbol@^1.0.3: dependencies: has-symbols "^1.0.2" -is-typedarray@^1.0.0, is-typedarray@~1.0.0: +is-typed-array@^1.1.10: + version "1.1.12" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== + dependencies: + which-typed-array "^1.1.11" + +is-typedarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= @@ -10053,6 +7118,11 @@ is-upper-case@^2.0.2: dependencies: tslib "^2.0.3" +is-weakmap@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" + integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== + is-weakref@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" @@ -10060,17 +7130,25 @@ is-weakref@^1.0.2: dependencies: call-bind "^1.0.2" -is-what@^3.12.0: +is-weakset@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" + integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +is-what@^3.14.1: version "3.14.1" resolved "https://registry.yarnpkg.com/is-what/-/is-what-3.14.1.tgz#e1222f46ddda85dead0fd1c9df131760e77755c1" integrity sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA== -is-windows@^1.0.1, is-windows@^1.0.2: +is-windows@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== -is-wsl@^2.1.1, is-wsl@^2.2.0: +is-wsl@^2.1.1: version "2.2.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== @@ -10082,24 +7160,17 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= +isarray@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity 
sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: +isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= @@ -10119,48 +7190,6 @@ isstream@~0.1.2: resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" - integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity 
sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9" - integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.5" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" - integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" @@ -10171,523 +7200,15 @@ jake@^10.8.5: filelist "^1.0.4" minimatch "^3.1.2" -jest-changed-files@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-27.5.1.tgz#a348aed00ec9bf671cc58a66fcbe7c3dfd6a68f5" - integrity sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== - dependencies: - "@jest/types" "^27.5.1" - execa "^5.0.0" - throat "^6.0.1" +jiti@^1.17.1: + version "1.21.0" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" + integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== -jest-circus@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-27.5.1.tgz#37a5a4459b7bf4406e53d637b49d22c65d125ecc" - integrity 
sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" - -jest-cli@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-27.5.1.tgz#278794a6e6458ea8029547e6c6cbf673bd30b145" - integrity sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== - dependencies: - "@jest/core" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - import-local "^3.0.2" - jest-config "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - prompts "^2.0.1" - yargs "^16.2.0" - -jest-config@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-27.5.1.tgz#5c387de33dca3f99ad6357ddeccd91bf3a0e4a41" - integrity sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== - dependencies: - "@babel/core" "^7.8.0" - "@jest/test-sequencer" "^27.5.1" - "@jest/types" "^27.5.1" - babel-jest "^27.5.1" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.9" - jest-circus "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-get-type "^27.5.1" - jest-jasmine2 "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runner "^27.5.1" - jest-util "^27.5.1" - jest-validate "^27.5.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^27.5.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" - 
-jest-diff@^26.0.0: - version "26.6.2" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-26.6.2.tgz#1aa7468b52c3a68d7d5c5fdcdfcd5e49bd164394" - integrity sha512-6m+9Z3Gv9wN0WFVasqjCL/06+EFCMTqDEUl/b87HYK2rAPTyfz4ZIuSlPhY51PIQRWx5TaxeF1qmXKe9gfN3sA== - dependencies: - chalk "^4.0.0" - diff-sequences "^26.6.2" - jest-get-type "^26.3.0" - pretty-format "^26.6.2" - -jest-diff@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-27.5.1.tgz#a07f5011ac9e6643cf8a95a462b7b1ecf6680def" - integrity sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== - dependencies: - chalk "^4.0.0" - diff-sequences "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-docblock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-27.5.1.tgz#14092f364a42c6108d42c33c8cf30e058e25f6c0" - integrity sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== - dependencies: - detect-newline "^3.0.0" - -jest-each@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-27.5.1.tgz#5bc87016f45ed9507fed6e4702a5b468a5b2c44e" - integrity sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - jest-get-type "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - -jest-environment-jsdom@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-27.5.1.tgz#ea9ccd1fc610209655a77898f86b2b559516a546" - integrity sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - jsdom "^16.6.0" - -jest-environment-node@^27.5.1: - 
version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-27.5.1.tgz#dedc2cfe52fab6b8f5714b4808aefa85357a365e" - integrity sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - jest-mock "^27.5.1" - jest-util "^27.5.1" - -jest-get-type@^26.3.0: - version "26.3.0" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0" - integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig== - -jest-get-type@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-27.5.1.tgz#3cd613c507b0f7ace013df407a1c1cd578bcb4f1" - integrity sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== - -jest-haste-map@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-27.5.1.tgz#9fd8bd7e7b4fa502d9c6164c5640512b4e811e7f" - integrity sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== - dependencies: - "@jest/types" "^27.5.1" - "@types/graceful-fs" "^4.1.2" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^27.5.1" - jest-serializer "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - micromatch "^4.0.4" - walker "^1.0.7" - optionalDependencies: - fsevents "^2.3.2" - -jest-jasmine2@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-27.5.1.tgz#a037b0034ef49a9f3d71c4375a796f3b230d1ac4" - integrity sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" 
"^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - expect "^27.5.1" - is-generator-fn "^2.0.0" - jest-each "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-runtime "^27.5.1" - jest-snapshot "^27.5.1" - jest-util "^27.5.1" - pretty-format "^27.5.1" - throat "^6.0.1" - -jest-leak-detector@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-27.5.1.tgz#6ec9d54c3579dd6e3e66d70e3498adf80fde3fb8" - integrity sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== - dependencies: - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-matcher-utils@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-27.5.1.tgz#9c0cdbda8245bc22d2331729d1091308b40cf8ab" - integrity sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== - dependencies: - chalk "^4.0.0" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - pretty-format "^27.5.1" - -jest-message-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-27.5.1.tgz#bdda72806da10d9ed6425e12afff38cd1458b6cf" - integrity sha512-rMyFe1+jnyAAf+NHwTclDz0eAaLkVDdKVHHBFWsBWHnnh5YeJMNWWsv7AbFYXfK3oTqvL7VTWkhNLu1jX24D+g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^27.5.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^27.5.1" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-message-util@^28.1.3: - version "28.1.3" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" - integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^28.1.3" - 
"@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^28.1.3" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6" - integrity sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - -jest-pnp-resolver@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-27.5.1.tgz#4da143f7e9fd1e542d4aa69617b38e4a78365b95" - integrity sha512-4bfKq2zie+x16okqDXjXn9ql2B0dScQu+vcwe4TvFVhkVyuWLqpZrZtXxLLWoXYgn0E87I6r6GRYHF7wFZBUvg== - -jest-regex-util@^28.0.0: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" - integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== - -jest-resolve-dependencies@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-27.5.1.tgz#d811ecc8305e731cc86dd79741ee98fed06f1da8" - integrity sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== - dependencies: - "@jest/types" "^27.5.1" - jest-regex-util "^27.5.1" - jest-snapshot "^27.5.1" - -jest-resolve@^27.4.2, jest-resolve@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-27.5.1.tgz#a2f1c5a0796ec18fe9eb1536ac3814c23617b384" - integrity 
sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== - dependencies: - "@jest/types" "^27.5.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-pnp-resolver "^1.2.2" - jest-util "^27.5.1" - jest-validate "^27.5.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" - -jest-runner@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-27.5.1.tgz#071b27c1fa30d90540805c5645a0ec167c7b62e5" - integrity sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== - dependencies: - "@jest/console" "^27.5.1" - "@jest/environment" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.8.1" - graceful-fs "^4.2.9" - jest-docblock "^27.5.1" - jest-environment-jsdom "^27.5.1" - jest-environment-node "^27.5.1" - jest-haste-map "^27.5.1" - jest-leak-detector "^27.5.1" - jest-message-util "^27.5.1" - jest-resolve "^27.5.1" - jest-runtime "^27.5.1" - jest-util "^27.5.1" - jest-worker "^27.5.1" - source-map-support "^0.5.6" - throat "^6.0.1" - -jest-runtime@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-27.5.1.tgz#4896003d7a334f7e8e4a53ba93fb9bcd3db0a1af" - integrity sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== - dependencies: - "@jest/environment" "^27.5.1" - "@jest/fake-timers" "^27.5.1" - "@jest/globals" "^27.5.1" - "@jest/source-map" "^27.5.1" - "@jest/test-result" "^27.5.1" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^27.5.1" - jest-message-util "^27.5.1" - jest-mock "^27.5.1" - jest-regex-util "^27.5.1" - jest-resolve "^27.5.1" - jest-snapshot "^27.5.1" - jest-util 
"^27.5.1" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-serializer@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-27.5.1.tgz#81438410a30ea66fd57ff730835123dea1fb1f64" - integrity sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== - dependencies: - "@types/node" "*" - graceful-fs "^4.2.9" - -jest-snapshot@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-27.5.1.tgz#b668d50d23d38054a51b42c4039cab59ae6eb6a1" - integrity sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== - dependencies: - "@babel/core" "^7.7.2" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.0.0" - "@jest/transform" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/babel__traverse" "^7.0.4" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^27.5.1" - graceful-fs "^4.2.9" - jest-diff "^27.5.1" - jest-get-type "^27.5.1" - jest-haste-map "^27.5.1" - jest-matcher-utils "^27.5.1" - jest-message-util "^27.5.1" - jest-util "^27.5.1" - natural-compare "^1.4.0" - pretty-format "^27.5.1" - semver "^7.3.2" - -jest-util@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-27.5.1.tgz#3ba9771e8e31a0b85da48fe0b0891fb86c01c2f9" - integrity sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== - dependencies: - "@jest/types" "^27.5.1" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-util@^28.1.3: - version "28.1.3" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" - integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== - dependencies: - "@jest/types" 
"^28.1.3" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-27.5.1.tgz#9197d54dc0bdb52260b8db40b46ae668e04df067" - integrity sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== - dependencies: - "@jest/types" "^27.5.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^27.5.1" - leven "^3.1.0" - pretty-format "^27.5.1" - -jest-watch-typeahead@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/jest-watch-typeahead/-/jest-watch-typeahead-1.1.0.tgz#b4a6826dfb9c9420da2f7bc900de59dad11266a9" - integrity sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== - dependencies: - ansi-escapes "^4.3.1" - chalk "^4.0.0" - jest-regex-util "^28.0.0" - jest-watcher "^28.0.0" - slash "^4.0.0" - string-length "^5.0.1" - strip-ansi "^7.0.1" - -jest-watcher@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-27.5.1.tgz#71bd85fb9bde3a2c2ec4dc353437971c43c642a2" - integrity sha512-z676SuD6Z8o8qbmEGhoEUFOM1+jfEiL3DXHK/xgEiG2EyNYfFG60jluWcupY6dATjfEsKQuibReS1djInQnoVw== - dependencies: - "@jest/test-result" "^27.5.1" - "@jest/types" "^27.5.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - jest-util "^27.5.1" - string-length "^4.0.1" - -jest-watcher@^28.0.0: - version "28.1.3" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" - integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== - dependencies: - "@jest/test-result" "^28.1.3" - "@jest/types" "^28.1.3" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.10.2" - jest-util "^28.1.3" - string-length "^4.0.1" - -jest-worker@^26.2.1: - version "26.6.2" - resolved 
"https://registry.yarnpkg.com/jest-worker/-/jest-worker-26.6.2.tgz#7f72cbc4d643c365e27b9fd775f9d0eaa9c7a8ed" - integrity sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^7.0.0" - -jest-worker@^27.0.2, jest-worker@^27.4.5, jest-worker@^27.5.1: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.5.1.tgz#8d146f0900e8973b106b6f73cc1e9a8cb86f8db0" - integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest-worker@^28.0.2: - version "28.1.3" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" - integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^27.4.3: - version "27.5.1" - resolved "https://registry.yarnpkg.com/jest/-/jest-27.5.1.tgz#dadf33ba70a779be7a6fc33015843b51494f63fc" - integrity sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== - dependencies: - "@jest/core" "^27.5.1" - import-local "^3.0.2" - jest-cli "^27.5.1" - -jiti@^1.17.1: - version "1.21.0" - resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" - integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== - -jiti@^1.18.2: - version "1.18.2" - resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" - integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== - -joi@^17.11.0: - version "17.11.0" - resolved 
"https://registry.yarnpkg.com/joi/-/joi-17.11.0.tgz#aa9da753578ec7720e6f0ca2c7046996ed04fc1a" - integrity sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ== - dependencies: - "@hapi/hoek" "^9.0.0" - "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.3" - "@sideway/formula" "^3.0.1" - "@sideway/pinpoint" "^2.0.0" +jiti@^1.18.2: + version "1.18.2" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.18.2.tgz#80c3ef3d486ebf2450d9335122b32d121f2a83cd" + integrity sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg== jose@^5.0.0: version "5.1.3" @@ -10704,14 +7225,6 @@ js-cookie@^2.2.1: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - js-yaml@^4.0.0, js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" @@ -10724,50 +7237,41 @@ jsbn@~0.1.0: resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= -jsdom@^16.6.0: - version "16.7.0" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-16.7.0.tgz#918ae71965424b197c819f8183a754e18977b710" - integrity sha512-u9Smc2G1USStM+s/x1ru5Sxrl6mPYCbByG1U/hUmqaVsm4tbNyS7CicOSRyuGQYZhTu0h84qkZZQ/I+dzizSVw== - dependencies: - abab "^2.0.5" - acorn "^8.2.4" - acorn-globals "^6.0.0" - cssom "^0.4.4" - cssstyle "^2.3.0" - data-urls "^2.0.0" - decimal.js "^10.2.1" - domexception "^2.0.1" - escodegen 
"^2.0.0" - form-data "^3.0.0" - html-encoding-sniffer "^2.0.1" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" +jsdom@^22.1.0: + version "22.1.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-22.1.0.tgz#0fca6d1a37fbeb7f4aac93d1090d782c56b611c8" + integrity sha512-/9AVW7xNbsBv6GfWho4TTNjEo9fe6Zhf9O7s0Fhhr3u+awPwAJMKwAMXnkk5vBxflqLW9hTHX/0cs+P3gW+cQw== + dependencies: + abab "^2.0.6" + cssstyle "^3.0.0" + data-urls "^4.0.0" + decimal.js "^10.4.3" + domexception "^4.0.0" + form-data "^4.0.0" + html-encoding-sniffer "^3.0.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.1" is-potential-custom-element-name "^1.0.1" - nwsapi "^2.2.0" - parse5 "6.0.1" - saxes "^5.0.1" + nwsapi "^2.2.4" + parse5 "^7.1.2" + rrweb-cssom "^0.6.0" + saxes "^6.0.0" symbol-tree "^3.2.4" - tough-cookie "^4.0.0" - w3c-hr-time "^1.0.2" - w3c-xmlserializer "^2.0.0" - webidl-conversions "^6.1.0" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^8.5.0" - ws "^7.4.6" - xml-name-validator "^3.0.0" + tough-cookie "^4.1.2" + w3c-xmlserializer "^4.0.0" + webidl-conversions "^7.0.0" + whatwg-encoding "^2.0.0" + whatwg-mimetype "^3.0.0" + whatwg-url "^12.0.1" + ws "^8.13.0" + xml-name-validator "^4.0.0" jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= - -json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: +json-parse-even-better-errors@^2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity 
sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== @@ -10777,12 +7281,7 @@ json-schema-traverse@^0.4.1: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== -json-schema-traverse@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" - integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== - -json-schema@0.2.3, json-schema@0.4.0, json-schema@^0.4.0: +json-schema@0.2.3, json-schema@0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== @@ -10826,17 +7325,15 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.1.2, json5@^2.2.0, json5@^2.2.2, json5@^2.2.3: +json5@^2.2.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== -jsonfile@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" - integrity sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= - optionalDependencies: - graceful-fs "^4.1.6" +jsonc-parser@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" + integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== jsonfile@^6.0.1: version "6.1.0" @@ -10852,11 +7349,6 @@ jsonify@~0.0.0: resolved 
"https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM= -jsonpointer@^5.0.0: - version "5.0.1" - resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" - integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== - jsprim@^1.2.2: version "1.4.1" resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" @@ -10882,45 +7374,6 @@ jsx-dom-cjs@^8.0.0: dependencies: csstype "^3.1.0" -just-extend@^4.0.2: - version "4.2.1" - resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" - integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved 
"https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -klona@^2.0.4, klona@^2.0.5: - version "2.0.6" - resolved "https://registry.yarnpkg.com/klona/-/klona-2.0.6.tgz#85bffbf819c03b2f53270412420a4555ef882e22" - integrity sha512-dhG34DXATL5hSxJbIexCft8FChFXtmskoZYnoPWjXQuebWYCNkVeV3KkGegCK9CP1oswI/vQibS2GY7Em/sJJA== - language-subtag-registry@~0.3.2: version "0.3.21" resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz#04ac218bea46f04cb039084602c6da9e788dd45a" @@ -10933,39 +7386,10 @@ language-tags@=1.0.5: dependencies: language-subtag-registry "~0.3.2" -launch-editor@^2.6.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.6.0.tgz#4c0c1a6ac126c572bd9ff9a30da1d2cae66defd7" - integrity sha512-JpDCcQnyAAzZZaZ7vEiSqL690w7dAEyLao+KC96zBplnYbJS7TYNjvM3M7y3dGz+v7aIsJk3hllWuc0kWAjyRQ== - dependencies: - picocolors "^1.0.0" - shell-quote "^1.7.3" - -lazy-ass@1.6.0: - version "1.6.0" - resolved "https://registry.npmjs.org/lazy-ass/-/lazy-ass-1.6.0.tgz#7999655e8646c17f089fdd187d150d3324d54513" - integrity sha1-eZllXoZGwX8In90YfRUNMyTVRRM= - -less-loader@^7.3.0: - version "7.3.0" - resolved "https://registry.yarnpkg.com/less-loader/-/less-loader-7.3.0.tgz#f9d6d36d18739d642067a05fb5bd70c8c61317e5" - integrity sha512-Mi8915g7NMaLlgi77mgTTQvK022xKRQBIVDSyfl3ErTuBhmZBQab0mjeJjNNqGbdR+qrfTleKXqbGI4uEFavxg== - dependencies: - klona "^2.0.4" - loader-utils "^2.0.0" - schema-utils "^3.0.0" - -less-vars-to-js@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/less-vars-to-js/-/less-vars-to-js-1.3.0.tgz#c322cf43a3c8fc3fab655da3e51a14c1499ab571" - integrity sha512-xeiLLn/IMCGtdyCkYQnW8UuzoW2oYMCKg9boZRaGI58fLz5r90bNJDlqGzmVt/1Uqk75/DxIVtQSNCMkE5fRZQ== - dependencies: - strip-json-comments "^2.0.1" - -less@^4.1.1: - version "4.1.3" - 
resolved "https://registry.yarnpkg.com/less/-/less-4.1.3.tgz#175be9ddcbf9b250173e0a00b4d6920a5b770246" - integrity sha512-w16Xk/Ta9Hhyei0Gpz9m7VS8F28nieJaL/VyShID7cYvP6IL5oHeL6p4TXSDJqZE/lNv0oJ2pGVjJsRkfwm5FA== +less@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/less/-/less-4.2.0.tgz#cbefbfaa14a4cd388e2099b2b51f956e1465c450" + integrity sha512-P3b3HJDBtSzsXUl0im2L7gTO5Ubg8mEN6G8qoTS77iXxXX4Hvu4Qj540PZDvQ8V6DmX6iXo98k7Md0Cm1PrLaA== dependencies: copy-anything "^2.0.1" parse-node-version "^1.0.1" @@ -10979,11 +7403,6 @@ less@^4.1.1: needle "^3.1.0" source-map "~0.6.0" -leven@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - levn@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" @@ -10992,14 +7411,6 @@ levn@^0.4.1: prelude-ls "^1.2.1" type-check "~0.4.0" -levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - lib0@^0.2.42, lib0@^0.2.49: version "0.2.53" resolved "https://registry.yarnpkg.com/lib0/-/lib0-0.2.53.tgz#ee674571bc0a597bc06a03767908049fedab34fc" @@ -11007,11 +7418,6 @@ lib0@^0.2.42, lib0@^0.2.49: dependencies: isomorphic.js "^0.2.4" -lilconfig@^2.0.3, lilconfig@^2.0.5, lilconfig@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" - integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== - lines-and-columns@^1.1.6: version "1.1.6" resolved 
"https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" @@ -11031,32 +7437,10 @@ listr2@^4.0.5: through "^2.3.8" wrap-ansi "^7.0.0" -loader-runner@^4.2.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.3.0.tgz#c1b4a163b99f614830353b16755e7149ac2314e1" - integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== - -loader-utils@^2.0.0, loader-utils@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.4.tgz#8b5cb38b5c34a9a018ee1fc0e6a066d1dfcc528c" - integrity sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -loader-utils@^3.2.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-3.2.1.tgz#4fb104b599daafd82ef3e1a41fb9265f87e1f576" - integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" +local-pkg@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/local-pkg/-/local-pkg-0.4.3.tgz#0ff361ab3ae7f1c19113d9bb97b98b905dbc4963" + integrity sha512-SFppqq5p42fe2qcZQqqEOiVRXl+WCP1MdT6k7BDEW1j++sp5fIY+/fdRQitvKgB5BrBcmrs5m/L0v2FrU5MY1g== locate-path@^5.0.0: version "5.0.0" @@ -11077,142 +7461,22 @@ lodash-es@^4.17.15: resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== 
-lodash.assign@^4.2.0: - version "4.2.0" - resolved "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" - integrity sha1-DZnzzNem0mHRm9rrkkUAXShYCOc= - -lodash.camelcase@^4.3.0: - version "4.3.0" - resolved "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" - integrity sha1-soqmKIorn8ZRA1x3EfZathkDMaY= - -lodash.clonedeep@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - integrity sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ== - -lodash.compact@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/lodash.compact/-/lodash.compact-3.0.1.tgz#540ce3837745975807471e16b4a2ba21e7256ca5" - integrity sha1-VAzjg3dFl1gHRx4WtKK6IeclbKU= - -lodash.debounce@4.0.8, lodash.debounce@^4.0.8: +lodash.debounce@4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= -lodash.find@^4.6.0: - version "4.6.0" - resolved "https://registry.npmjs.org/lodash.find/-/lodash.find-4.6.0.tgz#cb0704d47ab71789ffa0de8b97dd926fb88b13b1" - integrity sha1-ywcE1Hq3F4n/oN6Ll92Sb7iLE7E= - -lodash.flatten@^4.4.0: - version "4.4.0" - resolved "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" - integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8= - -lodash.forin@^4.4.0: - version "4.4.0" - resolved "https://registry.npmjs.org/lodash.forin/-/lodash.forin-4.4.0.tgz#5d3f20ae564011fbe88381f7d98949c9c9519731" - integrity sha1-XT8grlZAEfvog4H32YlJyclRlzE= - -lodash.get@^4.4.2: - version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" - integrity 
sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk= - -lodash.has@^4.5.2: - version "4.5.2" - resolved "https://registry.npmjs.org/lodash.has/-/lodash.has-4.5.2.tgz#d19f4dc1095058cccbe2b0cdf4ee0fe4aa37c862" - integrity sha1-0Z9NwQlQWMzL4rDN9O4P5Ko3yGI= - -lodash.invokemap@^4.6.0: - version "4.6.0" - resolved "https://registry.npmjs.org/lodash.invokemap/-/lodash.invokemap-4.6.0.tgz#1748cda5d8b0ef8369c4eb3ec54c21feba1f2d62" - integrity sha1-F0jNpdiw74NpxOs+xUwh/rofLWI= - -lodash.isempty@^4.4.0: - version "4.4.0" - resolved "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e" - integrity sha1-b4bL7di+TsmHvpqvM8loTbGzHn4= - -lodash.isequal@^4.5.0: - version "4.5.0" - resolved "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0" - integrity sha1-QVxEePK8wwEgwizhDtMib30+GOA= - -lodash.isfunction@^3.0.9: - version "3.0.9" - resolved "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz#06de25df4db327ac931981d1bdb067e5af68d051" - integrity sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw== - -lodash.isinteger@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" - integrity sha1-YZwK89A/iwTDH1iChAt3sRzWg0M= - -lodash.isplainobject@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" - integrity sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= - -lodash.lowerfirst@^4.3.1: - version "4.3.1" - resolved "https://registry.npmjs.org/lodash.lowerfirst/-/lodash.lowerfirst-4.3.1.tgz#de3c7b12e02c6524a0059c2f6cb7c5c52655a13d" - integrity sha1-3jx7EuAsZSSgBZwvbLfFxSZVoT0= - -lodash.map@^4.6.0: - version "4.6.0" - resolved "https://registry.npmjs.org/lodash.map/-/lodash.map-4.6.0.tgz#771ec7839e3473d9c4cde28b19394c3562f4f6d3" - 
integrity sha1-dx7Hg540c9nEzeKLGTlMNWL09tM= - -lodash.mapvalues@^4.6.0: - version "4.6.0" - resolved "https://registry.npmjs.org/lodash.mapvalues/-/lodash.mapvalues-4.6.0.tgz#1bafa5005de9dd6f4f26668c30ca37230cc9689c" - integrity sha1-G6+lAF3p3W9PJmaMMMo3IwzJaJw= - -lodash.memoize@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= - lodash.merge@^4.6.2: version "4.6.2" resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== -lodash.pick@4.4.0, lodash.pick@^4.4.0: +lodash.pick@4.4.0: version "4.4.0" resolved "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" integrity sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM= -lodash.snakecase@^4.1.1: - version "4.1.1" - resolved "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" - integrity sha1-OdcUo1NXFHg3rv1ktdy7Fr7Nj40= - -lodash.sortby@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" - integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== - -lodash.uniq@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" - integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= - -lodash.uniqby@^4.7.0: - version "4.7.0" - resolved "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" - integrity sha1-2ZwHpmnp5tJOE2Lf4mbGdhavEwI= - -lodash.values@^4.3.0: - version "4.3.0" - resolved 
"https://registry.npmjs.org/lodash.values/-/lodash.values-4.3.0.tgz#a3a6c2b0ebecc5c2cba1c17e6e620fe81b53d347" - integrity sha1-o6bCsOvsxcLLocF+bmIP6BtT00c= - -lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0, lodash@~4.17.0: +lodash@^4.0.1, lodash@^4.17.11, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@~4.17.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -11247,6 +7511,13 @@ loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3 dependencies: js-tokens "^3.0.0 || ^4.0.0" +loupe@^2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.7.tgz#6e69b7d4db7d3ab436328013d37d1c8c3540c697" + integrity sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA== + dependencies: + get-func-name "^2.0.1" + lower-case-first@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/lower-case-first/-/lower-case-first-2.0.2.tgz#64c2324a2250bf7c37c5901e76a5b5309301160b" @@ -11283,17 +7554,17 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" -lz-string@^1.4.4: - version "1.4.4" - resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.4.4.tgz#c0d8eaf36059f705796e1e344811cf4c498d3a26" - integrity sha1-wNjq82BZ9wV5bh40SBHPTEmNOiY= +lz-string@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/lz-string/-/lz-string-1.5.0.tgz#c1ab50f77887b712621201ba9fd4e3a6ed099941" + integrity sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ== -magic-string@^0.25.0, magic-string@^0.25.7: - version "0.25.7" - resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" - integrity 
sha512-4CrMT5DOHTDk4HYDlzmwu4FVCcIYI8gauveasrdCu2IKIFOJ3f0v/8MDGJCDL9oD2ppz/Av1b0Nj345H9M+XIA== +magic-string@^0.30.1: + version "0.30.5" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.5.tgz#1994d980bd1c8835dc6e78db7cbd4ae4f24746f9" + integrity sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA== dependencies: - sourcemap-codec "^1.4.4" + "@jridgewell/sourcemap-codec" "^1.4.15" make-dir@^2.1.0: version "2.1.0" @@ -11303,14 +7574,7 @@ make-dir@^2.1.0: pify "^4.0.1" semver "^5.6.0" -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -make-error@^1.1.1, make-error@^1.3.6: +make-error@^1.3.6: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== @@ -11320,30 +7584,11 @@ make-plural@^6.2.1, make-plural@^6.2.2: resolved "https://registry.yarnpkg.com/make-plural/-/make-plural-6.2.2.tgz#beb5fd751355e72660eeb2218bb98eec92853c6c" integrity sha512-8iTuFioatnTTmb/YJjywkVIHLjcwkFD9Ms0JpxjEm9Mo8eQYkh1z+55dwv4yc1jQ8ftVBxWQbihvZL1DfzGGWA== -makeerror@1.0.x: - version "1.0.11" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" - integrity sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw= - dependencies: - tmpl "1.0.x" - -map-cache@^0.2.0, map-cache@^0.2.2: +map-cache@^0.2.0: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= -map-stream@~0.1.0: - version "0.1.0" - resolved 
"https://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz#e56aa94c4c8055a16404a0674b78f215f7c8e194" - integrity sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - markdown-table@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-2.0.0.tgz#194a90ced26d31fe753d8b9434430214c011865b" @@ -11479,38 +7724,11 @@ mdn-data@2.0.14: resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50" integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== -mdn-data@2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" - integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== - mdurl@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" integrity sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4= -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== - -memfs@^3.1.2, memfs@^3.4.3: - version "3.6.0" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.6.0.tgz#d7a2110f86f79dd950a8b6df6d57bc984aa185f6" - integrity sha512-EGowvkkgbMcIChjMTMkESFDbZeSh8xZ7kNSF0hAiAN4Jh6jgHCRS0Ga/+C8y6Au+oqpezRHCfPsmJ2+DwAgiwQ== - dependencies: - fs-monkey "^1.0.4" - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - 
integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" @@ -11526,11 +7744,6 @@ messageformat-parser@^4.1.3: resolved "https://registry.yarnpkg.com/messageformat-parser/-/messageformat-parser-4.1.3.tgz#b824787f57fcda7d50769f5b63e8d4fda68f5b9e" integrity sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg== -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== - micromark-extension-gfm-autolink-literal@~0.5.0: version "0.5.7" resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz#53866c1f0c7ef940ae7ca1f72c6faef8fed9f204" @@ -11584,26 +7797,7 @@ micromark@^2.11.3, micromark@~2.11.0, micromark@~2.11.3: debug "^4.0.0" parse-entities "^2.0.0" -micromatch@^3.1.4: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex 
"^3.0.2" - -micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: +micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.5" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== @@ -11611,19 +7805,19 @@ micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: braces "^3.0.2" picomatch "^2.3.1" -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": +mime-db@1.52.0: version "1.52.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.1.12, mime-types@~2.1.19: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" -mime@1.6.0, mime@^1.4.1: +mime@^1.4.1: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== @@ -11645,26 +7839,6 @@ min-indent@^1.0.0: resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869" integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg== -mini-create-react-context@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz#072171561bfdc922da08a60c2197a497cc2d1d5e" - integrity 
sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== - dependencies: - "@babel/runtime" "^7.12.1" - tiny-warning "^1.0.3" - -mini-css-extract-plugin@^2.4.5: - version "2.7.6" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.7.6.tgz#282a3d38863fddcd2e0c220aaed5b90bc156564d" - integrity sha512-Qk7HcgaPkGG6eD77mLvZS1nmxlao3j+9PkrT9Uc7HAE1id3F41+DdBRYRYkbyfNRGzm8/YWtzhw7nVPmwhqTQw== - dependencies: - schema-utils "^4.0.0" - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch@^4.2.3, minimatch@^5.0.1: version "3.0.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.5.tgz#4da8f1290ee0f0f8e83d60ca69f8f134068604a3" @@ -11672,72 +7846,32 @@ minimatch@3.0.5, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.2, minimatch dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -miragejs@^0.1.0, miragejs@^0.1.41: - version "0.1.41" - resolved "https://registry.npmjs.org/miragejs/-/miragejs-0.1.41.tgz#1b06a2d2d9de65624f5bb1cee7ebb4a208f554d0" - integrity sha512-ur8x7sBskgey64vdzKGVCVC3hgKXWl2Cg5lZbxd6OmKrhr9LCCP/Bv7qh4wsQxIMHZnENxybFATXnrQ+rzSOWQ== - dependencies: - "@miragejs/pretender-node-polyfill" "^0.1.0" - inflected "^2.0.4" - lodash.assign "^4.2.0" - lodash.camelcase "^4.3.0" - lodash.clonedeep "^4.5.0" - lodash.compact 
"^3.0.1" - lodash.find "^4.6.0" - lodash.flatten "^4.4.0" - lodash.forin "^4.4.0" - lodash.get "^4.4.2" - lodash.has "^4.5.2" - lodash.invokemap "^4.6.0" - lodash.isempty "^4.4.0" - lodash.isequal "^4.5.0" - lodash.isfunction "^3.0.9" - lodash.isinteger "^4.0.4" - lodash.isplainobject "^4.0.6" - lodash.lowerfirst "^4.3.1" - lodash.map "^4.6.0" - lodash.mapvalues "^4.6.0" - lodash.pick "^4.4.0" - lodash.snakecase "^4.1.1" - lodash.uniq "^4.5.0" - lodash.uniqby "^4.7.0" - lodash.values "^4.3.0" - pretender "^3.4.3" - mitt@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mitt/-/mitt-2.1.0.tgz#f740577c23176c6205b121b2973514eade1b2230" integrity sha512-ILj2TpLiysu2wkBbWjAmww7TkZb65aiQO+DkVdUTBpBXq+MHYiETENkKFMtsJZX1Lf4pe4QOrTSjIfUwN5lRdg== -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp@^0.5.1, mkdirp@~0.5.1: +mkdirp@^0.5.1: version "0.5.5" resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== dependencies: minimist "^1.2.5" -mocked-env@1.3.2: - version "1.3.2" - resolved "https://registry.npmjs.org/mocked-env/-/mocked-env-1.3.2.tgz#548eb2fde141d083de70dc6b231cd9f3210d8731" - integrity sha512-jwm3ziowCjpbLNhUNYwn2G0tawV/ZGRuWeEGt6PItrkQT74Nk3pDldL2pmwm9sQZw6a/x+ZBGeBVYq54acTauQ== +mlly@^1.2.0, mlly@^1.4.0: + version "1.4.2" + resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.4.2.tgz#7cf406aa319ff6563d25da6b36610a93f2a8007e" + integrity sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg== dependencies: - check-more-types "2.24.0" - debug "4.1.1" - lazy-ass "1.6.0" - ramda 
"0.26.1" + acorn "^8.10.0" + pathe "^1.1.1" + pkg-types "^1.0.3" + ufo "^1.3.0" moment-timezone@^0.5.35: version "0.5.35" @@ -11756,29 +7890,16 @@ monaco-editor@^0.28.1: resolved "https://registry.yarnpkg.com/monaco-editor/-/monaco-editor-0.28.1.tgz#732788ff2172d59e6d436b206da8cac715413940" integrity sha512-P1vPqxB4B1ZFzTeR1ScggSp9/5NoQrLCq88fnlNUsuRAP1usEBN4TIpI2lw0AYIZNVIanHk0qwjze2uJwGOHUw== -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== - ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@2.1.3, ms@^2.1.1: +ms@^2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== -multicast-dns@^7.2.5: - version "7.2.5" - resolved "https://registry.yarnpkg.com/multicast-dns/-/multicast-dns-7.2.5.tgz#77eb46057f4d7adbd16d9290fa7299f6fa64cced" - integrity sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg== - dependencies: - dns-packet "^5.2.2" - thunky "^1.0.2" - multishift@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/multishift/-/multishift-2.0.5.tgz#443aa67bcba2f8b74e051f183de7431c6ab32f4c" @@ -11800,15 +7921,6 @@ mute-stream@0.0.8: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== -mz@^2.7.0: - version "2.7.0" - resolved 
"https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" - integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== - dependencies: - any-promise "^1.0.0" - object-assign "^4.0.1" - thenify-all "^1.0.0" - nano-css@^5.3.1: version "5.3.5" resolved "https://registry.yarnpkg.com/nano-css/-/nano-css-5.3.5.tgz#3075ea29ffdeb0c7cb6d25edb21d8f7fa8e8fe8e" @@ -11833,23 +7945,6 @@ nanoid@^3.3.6: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c" integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA== -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - natural-compare-lite@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz#17b09581988979fddafe0201e931ba933c96cbb4" @@ -11861,35 +7956,13 @@ natural-compare@^1.4.0: integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= needle@^3.1.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/needle/-/needle-3.2.0.tgz#07d240ebcabfd65c76c03afae7f6defe6469df44" - integrity sha512-oUvzXnyLiVyVGoianLijF9O/RecZUf7TkBfimjGrLM4eQhXyeJwM6GeAWccwfQ9aa4gMCZKqhAOuLaMIcQxajQ== + version "3.3.1" + resolved "https://registry.yarnpkg.com/needle/-/needle-3.3.1.tgz#63f75aec580c2e77e209f3f324e2cdf3d29bd049" + integrity sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q== dependencies: - debug 
"^3.2.6" iconv-lite "^0.6.3" sax "^1.2.4" -negotiator@0.6.3: - version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" - integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== - -neo-async@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" - integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== - -nise@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.0.tgz#713ef3ed138252daef20ec035ab62b7a28be645c" - integrity sha512-W5WlHu+wvo3PaKLsJJkgPup2LrsXCcm7AWwyNZkUnn5rwPkuPBi3Iwk5SQtN0mv+K65k7nKKjwNQ30wg3wLAQQ== - dependencies: - "@sinonjs/commons" "^1.7.0" - "@sinonjs/fake-timers" "^7.0.4" - "@sinonjs/text-encoding" "^0.7.1" - just-extend "^4.0.2" - path-to-regexp "^1.7.0" - no-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" @@ -11905,11 +7978,6 @@ node-fetch@2.6.7, node-fetch@^2.6.1: dependencies: whatwg-url "^5.0.0" -node-forge@^1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== - node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" @@ -11920,10 +7988,10 @@ node-releases@^2.0.12: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== -node-releases@^2.0.14: - version "2.0.14" - resolved 
"https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" - integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== +node-releases@^2.0.13: + version "2.0.13" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.13.tgz#d5ed1627c23e3461e819b02e57b75e4899b1c81d" + integrity sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ== normalize-path@^2.1.1: version "2.1.1" @@ -11937,24 +8005,7 @@ normalize-path@^3.0.0, normalize-path@~3.0.0: resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== -normalize-range@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= - -normalize-url@^6.0.1: - version "6.1.0" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" - integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nth-check@^1.0.2, nth-check@^2.0.1: +nth-check@^2.0.1: version "2.1.1" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-2.1.1.tgz#c9eab428effce36cd6b92c924bdb000ef1f1ed1d" integrity sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w== @@ -11971,52 +8022,39 @@ number-is-nan@^1.0.0: resolved 
"https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= -nwsapi@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" - integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== +nwsapi@^2.2.4: + version "2.2.7" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.7.tgz#738e0707d3128cb750dddcfe90e4610482df0f30" + integrity sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ== oauth-sign@~0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== -object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: +object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - -object-hash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - object-inspect@^1.12.2, object-inspect@^1.9.0: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" integrity 
sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== +object-is@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4: version "4.1.4" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" @@ -12045,15 +8083,6 @@ object.fromentries@^2.0.6: define-properties "^1.1.4" es-abstract "^1.20.4" -object.getownpropertydescriptors@^2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz#1bd63aeacf0d5d2d2f31b5e393b03a7c601a23f7" - integrity sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - object.hasown@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.2.tgz#f919e21fad4eb38a57bc6345b3afd496515c3f92" @@ -12076,7 +8105,7 @@ object.pick@^1.3.0: dependencies: isobject "^3.0.1" -object.values@^1.1.0, object.values@^1.1.6: +object.values@^1.1.6: version "1.1.6" resolved 
"https://registry.yarnpkg.com/object.values/-/object.values-1.1.6.tgz#4abbaa71eba47d63589d402856f908243eea9b1d" integrity sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw== @@ -12085,23 +8114,6 @@ object.values@^1.1.0, object.values@^1.1.6: define-properties "^1.1.4" es-abstract "^1.20.4" -obuf@^1.0.0, obuf@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" - integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== - -on-finished@2.4.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" - integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== - dependencies: - ee-first "1.1.1" - -on-headers@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/on-headers/-/on-headers-1.0.2.tgz#772b0ae6aaa525c399e489adfad90c403eb3c28f" - integrity sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA== - once@^1.3.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -12109,7 +8121,7 @@ once@^1.3.0: dependencies: wrappy "1" -onetime@^5.1.0, onetime@^5.1.2: +onetime@^5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== @@ -12124,15 +8136,6 @@ open@^7.3.1: is-docker "^2.0.0" is-wsl "^2.1.1" -open@^8.0.9, open@^8.4.0: - version "8.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-8.4.2.tgz#5b5ffe2a8f793dcd2aad73e550cb87b59cb084f9" - integrity sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ== - dependencies: - 
define-lazy-prop "^2.0.0" - is-docker "^2.1.1" - is-wsl "^2.2.0" - optimism@^0.16.0: version "0.16.1" resolved "https://registry.yarnpkg.com/optimism/-/optimism-0.16.1.tgz#7c8efc1f3179f18307b887e18c15c5b7133f6e7d" @@ -12141,18 +8144,6 @@ optimism@^0.16.0: "@wry/context" "^0.6.0" "@wry/trie" "^0.3.0" -optionator@^0.8.1: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - optionator@^0.9.1: version "0.9.1" resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" @@ -12180,10 +8171,10 @@ ora@^5.4.1: strip-ansi "^6.0.0" wcwidth "^1.0.1" -orderedmap@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-2.1.0.tgz#819457082fa3a06abd316d83a281a1ca467437cd" - integrity sha512-/pIFexOm6S70EPdznemIz3BQZoJ4VTFrhqzu0ACBqBgeLsLxq8e6Jim63ImIfwW/zAD1AlXpRMlOv3aghmo4dA== +orderedmap@^1.1.0: + version "1.1.8" + resolved "https://registry.yarnpkg.com/orderedmap/-/orderedmap-1.1.8.tgz#9652b2584f721c1032fa04cb60d442b3d4aa097c" + integrity sha512-eWEYOAggZZpZbJ9CTsqAKOTxlbBHdHZ8pzcfEvNTxGrjQ/m+Q25nSWUiMlT9MTbgpB6FOiBDKqsgJ2FlLDVNaw== os-tmpdir@~1.0.2: version "1.0.2" @@ -12197,19 +8188,19 @@ p-limit@3.1.0, p-limit@^3.0.2: dependencies: yocto-queue "^0.1.0" -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" -p-locate@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== +p-limit@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-4.0.0.tgz#914af6544ed32bfa54670b061cafcbd04984b644" + integrity sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ== dependencies: - p-limit "^2.0.0" + yocto-queue "^1.0.0" p-locate@^4.1.0: version "4.1.0" @@ -12232,14 +8223,6 @@ p-map@^4.0.0: dependencies: aggregate-error "^3.0.0" -p-retry@^4.5.0: - version "4.6.2" - resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.2.tgz#9baae7184057edd4e17231cee04264106e092a16" - integrity sha512-312Id396EbJdvRONlngUx0NydfrIQ5lsYu0znKVUzVvArzEIt08V1qhtyESbGVd1FGX7UKtiFp5uwKZdM8wIuQ== - dependencies: - "@types/retry" "0.12.0" - retry "^0.13.1" - p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -12306,15 +8289,17 @@ parse-node-version@^1.0.1: resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b" integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA== -parse5@6.0.1, parse5@^6.0.0: +parse5@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/parse5/-/parse5-6.0.1.tgz#e1a1c085c569b3dc08321184f19a39cc27f7c30b" integrity sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw== -parseurl@~1.3.2, parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== +parse5@^7.1.2: + version "7.1.2" + resolved 
"https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" + integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== + dependencies: + entities "^4.4.0" pascal-case@^3.1.1, pascal-case@^3.1.2: version "3.1.2" @@ -12324,11 +8309,6 @@ pascal-case@^3.1.1, pascal-case@^3.1.2: no-case "^3.0.4" tslib "^2.0.3" -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - path-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f" @@ -12337,11 +8317,6 @@ path-case@^3.0.4: dot-case "^3.0.4" tslib "^2.0.3" -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= - path-exists@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" @@ -12352,7 +8327,7 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-key@^3.0.0, path-key@^3.1.0: +path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== @@ -12374,11 +8349,6 @@ path-root@^0.1.1: dependencies: path-root-regex "^0.1.0" -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity 
sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== - path-to-regexp@^1.7.0: version "1.8.0" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" @@ -12391,614 +8361,54 @@ path-type@^4.0.0: resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -pause-stream@0.0.11: - version "0.0.11" - resolved "https://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445" - integrity sha1-/lo0sMvOErWqaitAPuLnO2AvFEU= - dependencies: - through "~2.3" +pathe@^1.1.0, pathe@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.1.tgz#1dd31d382b974ba69809adc9a7a347e65d84829a" + integrity sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q== + +pathval@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" + integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== performance-now@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= -picocolors@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" - integrity sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA== - picocolors@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity 
sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== -pirates@^4.0.1, pirates@^4.0.4: - version "4.0.6" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" - integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== - -pkg-dir@^4.1.0, pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pkg-up@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" - integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== - dependencies: - find-up "^3.0.0" - -posix-character-classes@^0.1.0: - version "0.1.1" - resolved 
"https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - -postcss-attribute-case-insensitive@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741" - integrity sha512-XIidXV8fDr0kKt28vqki84fRK8VW8eTuIa4PChv2MqKuT6C9UjmSKzen6KaWhWEoYvwxFCa7n/tC1SZ3tyq4SQ== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-browser-comments@^4: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-browser-comments/-/postcss-browser-comments-4.0.0.tgz#bcfc86134df5807f5d3c0eefa191d42136b5e72a" - integrity sha512-X9X9/WN3KIvY9+hNERUqX9gncsgBA25XaeR+jshHz2j8+sYyHktHw1JdKuMjeLpGktXidqDhA7b/qm1mrBDmgg== - -postcss-calc@^8.2.3: - version "8.2.4" - resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.2.4.tgz#77b9c29bfcbe8a07ff6693dc87050828889739a5" - integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== - dependencies: - postcss-selector-parser "^6.0.9" - postcss-value-parser "^4.2.0" - -postcss-clamp@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/postcss-clamp/-/postcss-clamp-4.1.0.tgz#7263e95abadd8c2ba1bd911b0b5a5c9c93e02363" - integrity sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-functional-notation@^4.2.4: - version "4.2.4" - resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-4.2.4.tgz#21a909e8d7454d3612d1659e471ce4696f28caec" - integrity sha512-2yrTAUZUab9s6CpxkxC4rVgFEVaR6/2Pipvi6qcgvnYiVqZcbDHEoBDhrXzyb7Efh2CCfHQNtcqWcIruDTIUeg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-hex-alpha@^8.0.4: - version "8.0.4" - resolved 
"https://registry.yarnpkg.com/postcss-color-hex-alpha/-/postcss-color-hex-alpha-8.0.4.tgz#c66e2980f2fbc1a63f5b079663340ce8b55f25a5" - integrity sha512-nLo2DCRC9eE4w2JmuKgVA3fGL3d01kGq752pVALF68qpGLmx2Qrk91QTKkdUqqp45T1K1XV8IhQpcu1hoAQflQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-color-rebeccapurple@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-7.1.1.tgz#63fdab91d878ebc4dd4b7c02619a0c3d6a56ced0" - integrity sha512-pGxkuVEInwLHgkNxUc4sdg4g3py7zUeCQ9sMfwyHAT+Ezk8a4OaaVZ8lIY5+oNqA/BXXgLyXv0+5wHP68R79hg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-colormin@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-5.3.1.tgz#86c27c26ed6ba00d96c79e08f3ffb418d1d1988f" - integrity sha512-UsWQG0AqTFQmpBegeLLc1+c3jIqBNB0zlDGRWR+dQ3pRKJL1oeMzyqmH3o2PIfn9MBdNrVPWhDbT769LxCTLJQ== - dependencies: - browserslist "^4.21.4" - caniuse-api "^3.0.0" - colord "^2.9.1" - postcss-value-parser "^4.2.0" - -postcss-convert-values@^5.1.3: - version "5.1.3" - resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-5.1.3.tgz#04998bb9ba6b65aa31035d669a6af342c5f9d393" - integrity sha512-82pC1xkJZtcJEfiLw6UXnXVXScgtBrjlO5CBmuDQc+dlb88ZYheFsjTn40+zBVi3DkfF7iezO0nJUPLcJK3pvA== - dependencies: - browserslist "^4.21.4" - postcss-value-parser "^4.2.0" - -postcss-custom-media@^8.0.2: - version "8.0.2" - resolved "https://registry.yarnpkg.com/postcss-custom-media/-/postcss-custom-media-8.0.2.tgz#c8f9637edf45fef761b014c024cee013f80529ea" - integrity sha512-7yi25vDAoHAkbhAzX9dHx2yc6ntS4jQvejrNcC+csQJAXjj15e7VcWfMgLqBNAbOvqi5uIa9huOVwdHbf+sKqg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-properties@^12.1.10: - version "12.1.11" - resolved "https://registry.yarnpkg.com/postcss-custom-properties/-/postcss-custom-properties-12.1.11.tgz#d14bb9b3989ac4d40aaa0e110b43be67ac7845cf" - integrity 
sha512-0IDJYhgU8xDv1KY6+VgUwuQkVtmYzRwu+dMjnmdMafXYv86SWqfxkc7qdDvWS38vsjaEtv8e0vGOUQrAiMBLpQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-custom-selectors@^6.0.3: - version "6.0.3" - resolved "https://registry.yarnpkg.com/postcss-custom-selectors/-/postcss-custom-selectors-6.0.3.tgz#1ab4684d65f30fed175520f82d223db0337239d9" - integrity sha512-fgVkmyiWDwmD3JbpCmB45SvvlCD6z9CG6Ie6Iere22W5aHea6oWa7EM2bpnv2Fj3I94L3VbtvX9KqwSi5aFzSg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-dir-pseudo-class@^6.0.5: - version "6.0.5" - resolved "https://registry.yarnpkg.com/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-6.0.5.tgz#2bf31de5de76added44e0a25ecf60ae9f7c7c26c" - integrity sha512-eqn4m70P031PF7ZQIvSgy9RSJ5uI2171O/OO/zcRNYpJbvaeKFUlar1aJ7rmgiQtbm0FSPsRewjpdS0Oew7MPA== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-discard-comments@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz#8df5e81d2925af2780075840c1526f0660e53696" - integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== - -postcss-discard-duplicates@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz#9eb4fe8456706a4eebd6d3b7b777d07bad03e848" - integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== - -postcss-discard-empty@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz#e57762343ff7f503fe53fca553d18d7f0c369c6c" - integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== - -postcss-discard-overridden@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz#7e8c5b53325747e9d90131bb88635282fb4a276e" - integrity 
sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== - -postcss-double-position-gradients@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/postcss-double-position-gradients/-/postcss-double-position-gradients-3.1.2.tgz#b96318fdb477be95997e86edd29c6e3557a49b91" - integrity sha512-GX+FuE/uBR6eskOK+4vkXgT6pDkexLokPaz/AbJna9s5Kzp/yl488pKPjhy0obB475ovfT1Wv8ho7U/cHNaRgQ== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-env-function@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/postcss-env-function/-/postcss-env-function-4.0.6.tgz#7b2d24c812f540ed6eda4c81f6090416722a8e7a" - integrity sha512-kpA6FsLra+NqcFnL81TnsU+Z7orGtDTxcOhl6pwXeEq1yFPpRMkCDpHhrz8CFQDr/Wfm0jLiNQ1OsGGPjlqPwA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-flexbugs-fixes@^5.0.2: - version "5.0.2" - resolved "https://registry.yarnpkg.com/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-5.0.2.tgz#2028e145313074fc9abe276cb7ca14e5401eb49d" - integrity sha512-18f9voByak7bTktR2QgDveglpn9DTbBWPUzSOe9g0N4WR/2eSt6Vrcbf0hmspvMI6YWGywz6B9f7jzpFNJJgnQ== - -postcss-focus-visible@^6.0.4: - version "6.0.4" - resolved "https://registry.yarnpkg.com/postcss-focus-visible/-/postcss-focus-visible-6.0.4.tgz#50c9ea9afa0ee657fb75635fabad25e18d76bf9e" - integrity sha512-QcKuUU/dgNsstIK6HELFRT5Y3lbrMLEOwG+A4s5cA+fx3A3y/JTq3X9LaOj3OC3ALH0XqyrgQIgey/MIZ8Wczw== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-focus-within@^5.0.4: - version "5.0.4" - resolved "https://registry.yarnpkg.com/postcss-focus-within/-/postcss-focus-within-5.0.4.tgz#5b1d2ec603195f3344b716c0b75f61e44e8d2e20" - integrity sha512-vvjDN++C0mu8jz4af5d52CB184ogg/sSxAFS+oUJQq2SuCe7T5U2iIsVJtsCp2d6R4j0jr5+q3rPkBVZkXD9fQ== - dependencies: - postcss-selector-parser "^6.0.9" - -postcss-font-variant@^5.0.0: - version "5.0.0" - resolved 
"https://registry.yarnpkg.com/postcss-font-variant/-/postcss-font-variant-5.0.0.tgz#efd59b4b7ea8bb06127f2d031bfbb7f24d32fa66" - integrity sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA== - -postcss-gap-properties@^3.0.5: - version "3.0.5" - resolved "https://registry.yarnpkg.com/postcss-gap-properties/-/postcss-gap-properties-3.0.5.tgz#f7e3cddcf73ee19e94ccf7cb77773f9560aa2fff" - integrity sha512-IuE6gKSdoUNcvkGIqdtjtcMtZIFyXZhmFd5RUlg97iVEvp1BZKV5ngsAjCjrVy+14uhGBQl9tzmi1Qwq4kqVOg== - -postcss-image-set-function@^4.0.7: - version "4.0.7" - resolved "https://registry.yarnpkg.com/postcss-image-set-function/-/postcss-image-set-function-4.0.7.tgz#08353bd756f1cbfb3b6e93182c7829879114481f" - integrity sha512-9T2r9rsvYzm5ndsBE8WgtrMlIT7VbtTfE7b3BQnudUqnBcBo7L758oc+o+pdj/dUV0l5wjwSdjeOH2DZtfv8qw== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-import@^15.1.0: - version "15.1.0" - resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" - integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== - dependencies: - postcss-value-parser "^4.0.0" - read-cache "^1.0.0" - resolve "^1.1.7" - -postcss-initial@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-initial/-/postcss-initial-4.0.1.tgz#529f735f72c5724a0fb30527df6fb7ac54d7de42" - integrity sha512-0ueD7rPqX8Pn1xJIjay0AZeIuDoF+V+VvMt/uOnn+4ezUKhZM/NokDeP6DwMNyIoYByuN/94IQnt5FEkaN59xQ== - -postcss-js@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" - integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== - dependencies: - camelcase-css "^2.0.1" - -postcss-lab-function@^4.2.1: - version "4.2.1" - resolved 
"https://registry.yarnpkg.com/postcss-lab-function/-/postcss-lab-function-4.2.1.tgz#6fe4c015102ff7cd27d1bd5385582f67ebdbdc98" - integrity sha512-xuXll4isR03CrQsmxyz92LJB2xX9n+pZJ5jE9JgcnmsCammLyKdlzrBin+25dy6wIjfhJpKBAN80gsTlCgRk2w== - dependencies: - "@csstools/postcss-progressive-custom-properties" "^1.1.0" - postcss-value-parser "^4.2.0" - -postcss-load-config@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.1.tgz#152383f481c2758274404e4962743191d73875bd" - integrity sha512-vEJIc8RdiBRu3oRAI0ymerOn+7rPuMvRXslTvZUKZonDHFIczxztIyJ1urxM1x9JXEikvpWWTUUqal5j/8QgvA== - dependencies: - lilconfig "^2.0.5" - yaml "^2.1.1" - -postcss-loader@^6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.1.tgz#0895f7346b1702103d30fdc66e4d494a93c008ef" - integrity sha512-WbbYpmAaKcux/P66bZ40bpWsBucjx/TTgVVzRZ9yUO8yQfVBlameJ0ZGVaPfH64hNSBh63a+ICP5nqOpBA0w+Q== - dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.5" - -postcss-logical@^5.0.4: - version "5.0.4" - resolved "https://registry.yarnpkg.com/postcss-logical/-/postcss-logical-5.0.4.tgz#ec75b1ee54421acc04d5921576b7d8db6b0e6f73" - integrity sha512-RHXxplCeLh9VjinvMrZONq7im4wjWGlRJAqmAVLXyZaXwfDWP73/oq4NdIp+OZwhQUMj0zjqDfM5Fj7qby+B4g== - -postcss-media-minmax@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/postcss-media-minmax/-/postcss-media-minmax-5.0.0.tgz#7140bddec173e2d6d657edbd8554a55794e2a5b5" - integrity sha512-yDUvFf9QdFZTuCUg0g0uNSHVlJ5X1lSzDZjPSFaiCWvjgsvu8vEVxtahPrLMinIDEEGnx6cBe6iqdx5YWz08wQ== - -postcss-merge-longhand@^5.1.7: - version "5.1.7" - resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-5.1.7.tgz#24a1bdf402d9ef0e70f568f39bdc0344d568fb16" - integrity sha512-YCI9gZB+PLNskrK0BB3/2OzPnGhPkBEwmwhfYk1ilBHYVAZB7/tkTHFBAnCrvBBOmeYyMYw3DMjT55SyxMBzjQ== - dependencies: - postcss-value-parser "^4.2.0" - stylehacks "^5.1.1" - 
-postcss-merge-rules@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-5.1.4.tgz#2f26fa5cacb75b1402e213789f6766ae5e40313c" - integrity sha512-0R2IuYpgU93y9lhVbO/OylTtKMVcHb67zjWIfCiKR9rWL3GUk1677LAqD/BcHizukdZEjT8Ru3oHRoAYoJy44g== - dependencies: - browserslist "^4.21.4" - caniuse-api "^3.0.0" - cssnano-utils "^3.1.0" - postcss-selector-parser "^6.0.5" - -postcss-minify-font-values@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz#f1df0014a726083d260d3bd85d7385fb89d1f01b" - integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-minify-gradients@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz#f1fe1b4f498134a5068240c2f25d46fcd236ba2c" - integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== - dependencies: - colord "^2.9.1" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-params@^5.1.4: - version "5.1.4" - resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-5.1.4.tgz#c06a6c787128b3208b38c9364cfc40c8aa5d7352" - integrity sha512-+mePA3MgdmVmv6g+30rn57USjOGSAyuxUmkfiWpzalZ8aiBkdPYjXWtHuwJGm1v5Ojy0Z0LaSYhHaLJQB0P8Jw== - dependencies: - browserslist "^4.21.4" - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-minify-selectors@^5.2.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz#d4e7e6b46147b8117ea9325a915a801d5fe656c6" - integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== - dependencies: - postcss-selector-parser "^6.0.5" - -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz#cda1f047c0ae80c97dbe28c3e76a43b88025741d" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== - -postcss-modules-local-by-default@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.3.tgz#b08eb4f083050708998ba2c6061b50c2870ca524" - integrity sha512-2/u2zraspoACtrbFRnTijMiQtb4GW4BvatjaG/bCjYQo8kLTdevCUlwuBHx2sCnSyrI3x3qj4ZK1j5LQBgzmwA== - dependencies: - icss-utils "^5.0.0" - postcss-selector-parser "^6.0.2" - postcss-value-parser "^4.1.0" - -postcss-modules-scope@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz#9ef3151456d3bbfa120ca44898dfca6f2fa01f06" - integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== - dependencies: - postcss-selector-parser "^6.0.4" - -postcss-modules-values@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-4.0.0.tgz#d7c5e7e68c3bb3c9b27cbf48ca0bb3ffb4602c9c" - integrity sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ== - dependencies: - icss-utils "^5.0.0" - -postcss-nested@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" - integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== - dependencies: - postcss-selector-parser "^6.0.11" - -postcss-nesting@^10.2.0: - version "10.2.0" - resolved "https://registry.yarnpkg.com/postcss-nesting/-/postcss-nesting-10.2.0.tgz#0b12ce0db8edfd2d8ae0aaf86427370b898890be" - integrity sha512-EwMkYchxiDiKUhlJGzWsD9b2zvq/r2SSubcRrgP+jujMXFzqvANLt16lJANC+5uZ6hjI7lpRmI6O8JIl+8l1KA== - 
dependencies: - "@csstools/selector-specificity" "^2.0.0" - postcss-selector-parser "^6.0.10" - -postcss-normalize-charset@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz#9302de0b29094b52c259e9b2cf8dc0879879f0ed" - integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== - -postcss-normalize-display-values@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz#72abbae58081960e9edd7200fcf21ab8325c3da8" - integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-positions@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz#ef97279d894087b59325b45c47f1e863daefbb92" - integrity sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-repeat-style@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz#e9eb96805204f4766df66fd09ed2e13545420fb2" - integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-string@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz#411961169e07308c82c1f8c55f3e8a337757e228" - integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-timing-functions@^5.1.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz#d5614410f8f0b2388e9f240aa6011ba6f52dafbb" - integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize-unicode@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.1.tgz#f67297fca3fea7f17e0d2caa40769afc487aa030" - integrity sha512-qnCL5jzkNUmKVhZoENp1mJiGNPcsJCs1aaRmURmeJGES23Z/ajaln+EPTD+rBeNkSryI+2WTdW+lwcVdOikrpA== - dependencies: - browserslist "^4.21.4" - postcss-value-parser "^4.2.0" - -postcss-normalize-url@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz#ed9d88ca82e21abef99f743457d3729a042adcdc" - integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== - dependencies: - normalize-url "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-normalize-whitespace@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz#08a1a0d1ffa17a7cc6efe1e6c9da969cc4493cfa" - integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-normalize@^10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/postcss-normalize/-/postcss-normalize-10.0.1.tgz#464692676b52792a06b06880a176279216540dd7" - integrity sha512-+5w18/rDev5mqERcG3W5GZNMJa1eoYYNGo8gB7tEwaos0ajk3ZXAI4mHGcNT47NE+ZnZD1pEpUOFLvltIwmeJA== - dependencies: - "@csstools/normalize.css" "*" - postcss-browser-comments "^4" - sanitize.css "*" - -postcss-opacity-percentage@^1.1.2: - version "1.1.3" - resolved 
"https://registry.yarnpkg.com/postcss-opacity-percentage/-/postcss-opacity-percentage-1.1.3.tgz#5b89b35551a556e20c5d23eb5260fbfcf5245da6" - integrity sha512-An6Ba4pHBiDtyVpSLymUUERMo2cU7s+Obz6BTrS+gxkbnSBNKSuD0AVUc+CpBMrpVPKKfoVz0WQCX+Tnst0i4A== - -postcss-ordered-values@^5.1.3: - version "5.1.3" - resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz#b6fd2bd10f937b23d86bc829c69e7732ce76ea38" - integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== - dependencies: - cssnano-utils "^3.1.0" - postcss-value-parser "^4.2.0" - -postcss-overflow-shorthand@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/postcss-overflow-shorthand/-/postcss-overflow-shorthand-3.0.4.tgz#7ed6486fec44b76f0eab15aa4866cda5d55d893e" - integrity sha512-otYl/ylHK8Y9bcBnPLo3foYFLL6a6Ak+3EQBPOTR7luMYCOsiVTUk1iLvNf6tVPNGXcoL9Hoz37kpfriRIFb4A== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-page-break@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/postcss-page-break/-/postcss-page-break-3.0.4.tgz#7fbf741c233621622b68d435babfb70dd8c1ee5f" - integrity sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ== - -postcss-place@^7.0.5: - version "7.0.5" - resolved "https://registry.yarnpkg.com/postcss-place/-/postcss-place-7.0.5.tgz#95dbf85fd9656a3a6e60e832b5809914236986c4" - integrity sha512-wR8igaZROA6Z4pv0d+bvVrvGY4GVHihBCBQieXFY3kuSuMyOmEnnfFzHl/tQuqHZkfkIVBEbDvYcFfHmpSet9g== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-preset-env@^7.0.1: - version "7.8.3" - resolved "https://registry.yarnpkg.com/postcss-preset-env/-/postcss-preset-env-7.8.3.tgz#2a50f5e612c3149cc7af75634e202a5b2ad4f1e2" - integrity sha512-T1LgRm5uEVFSEF83vHZJV2z19lHg4yJuZ6gXZZkqVsqv63nlr6zabMH3l4Pc01FQCyfWVrh2GaUeCVy9Po+Aag== - dependencies: - "@csstools/postcss-cascade-layers" "^1.1.1" - "@csstools/postcss-color-function" "^1.1.1" - 
"@csstools/postcss-font-format-keywords" "^1.0.1" - "@csstools/postcss-hwb-function" "^1.0.2" - "@csstools/postcss-ic-unit" "^1.0.1" - "@csstools/postcss-is-pseudo-class" "^2.0.7" - "@csstools/postcss-nested-calc" "^1.0.0" - "@csstools/postcss-normalize-display-values" "^1.0.1" - "@csstools/postcss-oklab-function" "^1.1.1" - "@csstools/postcss-progressive-custom-properties" "^1.3.0" - "@csstools/postcss-stepped-value-functions" "^1.0.1" - "@csstools/postcss-text-decoration-shorthand" "^1.0.0" - "@csstools/postcss-trigonometric-functions" "^1.0.2" - "@csstools/postcss-unset-value" "^1.0.2" - autoprefixer "^10.4.13" - browserslist "^4.21.4" - css-blank-pseudo "^3.0.3" - css-has-pseudo "^3.0.4" - css-prefers-color-scheme "^6.0.3" - cssdb "^7.1.0" - postcss-attribute-case-insensitive "^5.0.2" - postcss-clamp "^4.1.0" - postcss-color-functional-notation "^4.2.4" - postcss-color-hex-alpha "^8.0.4" - postcss-color-rebeccapurple "^7.1.1" - postcss-custom-media "^8.0.2" - postcss-custom-properties "^12.1.10" - postcss-custom-selectors "^6.0.3" - postcss-dir-pseudo-class "^6.0.5" - postcss-double-position-gradients "^3.1.2" - postcss-env-function "^4.0.6" - postcss-focus-visible "^6.0.4" - postcss-focus-within "^5.0.4" - postcss-font-variant "^5.0.0" - postcss-gap-properties "^3.0.5" - postcss-image-set-function "^4.0.7" - postcss-initial "^4.0.1" - postcss-lab-function "^4.2.1" - postcss-logical "^5.0.4" - postcss-media-minmax "^5.0.0" - postcss-nesting "^10.2.0" - postcss-opacity-percentage "^1.1.2" - postcss-overflow-shorthand "^3.0.4" - postcss-page-break "^3.0.4" - postcss-place "^7.0.5" - postcss-pseudo-class-any-link "^7.1.6" - postcss-replace-overflow-wrap "^4.0.0" - postcss-selector-not "^6.0.1" - postcss-value-parser "^4.2.0" - -postcss-pseudo-class-any-link@^7.1.6: - version "7.1.6" - resolved "https://registry.yarnpkg.com/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-7.1.6.tgz#2693b221902da772c278def85a4d9a64b6e617ab" - integrity 
sha512-9sCtZkO6f/5ML9WcTLcIyV1yz9D1rf0tWc+ulKcvV30s0iZKS/ONyETvoWsr6vnrmW+X+KmuK3gV/w5EWnT37w== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-reduce-initial@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-5.1.2.tgz#798cd77b3e033eae7105c18c9d371d989e1382d6" - integrity sha512-dE/y2XRaqAi6OvjzD22pjTUQ8eOfc6m/natGHgKFBK9DxFmIm69YmaRVQrGgFlEfc1HePIurY0TmDeROK05rIg== - dependencies: - browserslist "^4.21.4" - caniuse-api "^3.0.0" - -postcss-reduce-transforms@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz#333b70e7758b802f3dd0ddfe98bb1ccfef96b6e9" - integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== - dependencies: - postcss-value-parser "^4.2.0" - -postcss-replace-overflow-wrap@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-4.0.0.tgz#d2df6bed10b477bf9c52fab28c568b4b29ca4319" - integrity sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw== - -postcss-selector-not@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/postcss-selector-not/-/postcss-selector-not-6.0.1.tgz#8f0a709bf7d4b45222793fc34409be407537556d" - integrity sha512-1i9affjAe9xu/y9uqWH+tD4r6/hDaXJruk8xn2x1vzxC2U3J3LKO3zJW4CyxlNhA56pADJ/djpEwpH1RClI2rQ== - dependencies: - postcss-selector-parser "^6.0.10" - -postcss-selector-parser@^6.0.10, postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.9: - version "6.0.13" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.13.tgz#d05d8d76b1e8e173257ef9d60b706a8e5e99bf1b" - integrity 
sha512-EaV1Gl4mUEV4ddhDnv/xtj7sxwrwxdetHdWUGnT4VJQf+4d05v6lHYZr8N573k5Z0BViss7BDhfWtKS3+sfAqQ== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-svgo@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-5.1.0.tgz#0a317400ced789f233a28826e77523f15857d80d" - integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== - dependencies: - postcss-value-parser "^4.2.0" - svgo "^2.7.0" - -postcss-unique-selectors@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz#a9f273d1eacd09e9aa6088f4b0507b18b1b541b6" - integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== +pkg-types@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.0.3.tgz#988b42ab19254c01614d13f4f65a2cfc7880f868" + integrity sha512-nN7pYi0AQqJnoLPC9eHFQ8AcyaixBUOwvqc5TDnIKCMEE6I0y8P7OKA7fPexsXGCGxQDl/cmrLAp26LhcwxZ4A== dependencies: - postcss-selector-parser "^6.0.5" + jsonc-parser "^3.2.0" + mlly "^1.2.0" + pathe "^1.1.0" -postcss-value-parser@^4.0.0, postcss-value-parser@^4.0.2, postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: +postcss-value-parser@^4.0.2: version "4.2.0" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== -postcss@^7.0.35: - version "7.0.39" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.3.5, postcss@^8.4.21, postcss@^8.4.23, postcss@^8.4.4: - version "8.4.24" - resolved 
"https://registry.yarnpkg.com/postcss/-/postcss-8.4.24.tgz#f714dba9b2284be3cc07dbd2fc57ee4dc972d2df" - integrity sha512-M0RzbcI0sO/XJNucsGjvWU9ERWxb/ytp1w6dKtxTKgixdtQDq4rmx/g8W1hnaheq9jgwL/oyEdH5Bc4WwJKMqg== +postcss@^8.4.27: + version "8.4.31" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" + integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== dependencies: nanoid "^3.3.6" picocolors "^1.0.0" @@ -13017,48 +8427,12 @@ prelude-ls@^1.2.1: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= - -pretender@^3.4.3: - version "3.4.3" - resolved "https://registry.npmjs.org/pretender/-/pretender-3.4.3.tgz#a3b4160516007075d29127262f3a0063d19896e9" - integrity sha512-AlbkBly9R8KR+R0sTCJ/ToOeEoUMtt52QVCetui5zoSmeLOU3S8oobFsyPLm1O2txR6t58qDNysqPnA1vVi8Hg== - dependencies: - fake-xml-http-request "^2.1.1" - route-recognizer "^0.3.3" - prettier@^2.8.8: version "2.8.8" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== -pretty-bytes@^5.3.0, pretty-bytes@^5.4.1: - version "5.6.0" - resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" - integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== - -pretty-error@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" - integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== - dependencies: - lodash "^4.17.20" - renderkid "^3.0.0" - -pretty-format@^26.0.0, pretty-format@^26.6.2: - version "26.6.2" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-26.6.2.tgz#e35c2705f14cb7fe2fe94fa078345b444120fc93" - integrity sha512-7AeGuCYNGmycyQbCqd/3PWH4eOoX/OiCa0uphp57NVTeAGdJGaAliecxwBDHYQCIvrW7aDBZCYeNTP/WX69mkg== - dependencies: - "@jest/types" "^26.6.2" - ansi-regex "^5.0.0" - ansi-styles "^4.0.0" - react-is "^17.0.1" - -pretty-format@^27.5.1: +pretty-format@^27.0.2: version "27.5.1" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-27.5.1.tgz#2181879fdea51a7a5851fb39d920faa63f01d88e" integrity sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ== @@ -13067,13 +8441,12 @@ pretty-format@^27.5.1: ansi-styles "^5.0.0" react-is "^17.0.1" -pretty-format@^28.1.3: - version "28.1.3" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" - integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== +pretty-format@^29.5.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" + integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== dependencies: - "@jest/schemas" "^28.1.3" - ansi-regex "^5.0.1" + "@jest/schemas" "^29.6.3" ansi-styles "^5.0.0" react-is "^18.0.0" @@ -13082,11 +8455,6 @@ prismjs@^1.22.0, prismjs@^1.27.0, prismjs@~1.23.0: resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12" integrity 
sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" @@ -13094,21 +8462,6 @@ promise@^7.1.1: dependencies: asap "~2.0.3" -promise@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e" - integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== - dependencies: - asap "~2.0.6" - -prompts@^2.0.1, prompts@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - prop-types@15.7.2: version "15.7.2" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" @@ -13203,12 +8556,12 @@ prosemirror-keymap@^1.0.0, prosemirror-keymap@^1.1.2, prosemirror-keymap@^1.2.0: prosemirror-state "^1.0.0" w3c-keyname "^2.2.0" -prosemirror-model@^1.0.0, prosemirror-model@^1.16.0, prosemirror-model@^1.18.3, prosemirror-model@^1.8.1: - version "1.18.3" - resolved "https://registry.yarnpkg.com/prosemirror-model/-/prosemirror-model-1.18.3.tgz#d1026a78cff928fd600e90d87cf7d162e0a4e3fd" - integrity sha512-yUVejauEY3F1r7PDy4UJKEGeIU+KFc71JQl5sNvG66CLVdKXRjhWpBW6KMeduGsmGOsw85f6EGrs6QxIKOVILA== +prosemirror-model@1.8.2, prosemirror-model@^1.0.0, prosemirror-model@^1.1.0, prosemirror-model@^1.18.3, 
prosemirror-model@^1.8.1: + version "1.8.2" + resolved "https://registry.yarnpkg.com/prosemirror-model/-/prosemirror-model-1.8.2.tgz#c74eaacb0bbfea49b59a6d89fef5516181666a56" + integrity sha512-piffokzW7opZVCjf/9YaoXvTC0g7zMRWKJib1hpphPfC+4x6ZXe5CiExgycoWZJe59VxxP7uHX8aFiwg2i9mUQ== dependencies: - orderedmap "^2.0.0" + orderedmap "^1.1.0" prosemirror-paste-rules@^2.0.3: version "2.0.3" @@ -13240,14 +8593,13 @@ prosemirror-schema-list@^1.2.2: prosemirror-state "^1.0.0" prosemirror-transform "^1.0.0" -prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.1, prosemirror-state@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/prosemirror-state/-/prosemirror-state-1.4.2.tgz#f93bd8a33a4454efab917ba9b738259d828db7e5" - integrity sha512-puuzLD2mz/oTdfgd8msFbe0A42j5eNudKAAPDB0+QJRw8cO1ygjLmhLrg9RvDpf87Dkd6D4t93qdef00KKNacQ== +prosemirror-state@1.3.2, prosemirror-state@^1.0.0, prosemirror-state@^1.2.2, prosemirror-state@^1.3.1, prosemirror-state@^1.4.1, prosemirror-state@^1.4.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/prosemirror-state/-/prosemirror-state-1.3.2.tgz#1b910b0dc01c1f00926bb9ba1589f7b7ac0d658b" + integrity sha512-t/JqE3aR0SV9QrzFVkAXsQwsgrQBNs/BDbcFH20RssW0xauqNNdjTXxy/J/kM7F+0zYi6+BRmz7cMMQQFU3mwQ== dependencies: prosemirror-model "^1.0.0" prosemirror-transform "^1.0.0" - prosemirror-view "^1.27.0" prosemirror-suggest@^2.0.3: version "2.0.3" @@ -13281,52 +8633,37 @@ prosemirror-trailing-node@^2.0.3: "@remirror/core-helpers" "^2.0.1" escape-string-regexp "^4.0.0" -prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transform@^1.2.1, prosemirror-transform@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/prosemirror-transform/-/prosemirror-transform-1.7.0.tgz#a8a0768f3ee6418d26ebef435beda9d43c65e472" - integrity sha512-O4T697Cqilw06Zvc3Wm+e237R6eZtJL/xGMliCi+Uo8VL6qHk6afz1qq0zNjT3eZMuYwnP8ZS0+YxX/tfcE9TQ== +prosemirror-transform@1.2.2, 
prosemirror-transform@^1.0.0, prosemirror-transform@^1.1.0, prosemirror-transform@^1.2.1, prosemirror-transform@^1.7.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/prosemirror-transform/-/prosemirror-transform-1.2.2.tgz#4439ae7e88ea1395d9beed6a4cd852d72b16ed2f" + integrity sha512-expO11jAsxaHk2RdZtzPsumc1bAAZi4UiXwTLQbftsdnIUWZE5Snyag595p1lx/B8QHUZ6tYWWOaOkzXKoJmYw== dependencies: prosemirror-model "^1.0.0" -prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.29.1: - version "1.29.1" - resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.29.1.tgz#9a4938d1a863ca76e23c6573d30e3ece2b17d9a0" - integrity sha512-OhujVZSDsh0l0PyHNdfaBj6DBkbhYaCfbaxmTeFrMKd/eWS+G6IC+OAbmR9IsLC8Se1HSbphMaXnsXjupHL3UQ== +prosemirror-view@1.13.4, prosemirror-view@^1.0.0, prosemirror-view@^1.1.0, prosemirror-view@^1.13.3, prosemirror-view@^1.27.0, prosemirror-view@^1.29.1: + version "1.13.4" + resolved "https://registry.yarnpkg.com/prosemirror-view/-/prosemirror-view-1.13.4.tgz#01d873db7731e0aacc410a9038447d1b7536fd07" + integrity sha512-mtgWEK16uYQFk3kijRlkSpAmDuy7rxYuv0pgyEBDmLT1PCPY8380CoaYnP8znUT6BXIGlJ8oTveK3M50U+B0vw== dependencies: - prosemirror-model "^1.16.0" + prosemirror-model "^1.1.0" prosemirror-state "^1.0.0" prosemirror-transform "^1.1.0" -proxy-addr@~2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" - integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== - dependencies: - forwarded "0.2.0" - ipaddr.js "1.9.1" - -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== - prr@~1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= + integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== -ps-tree@1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/ps-tree/-/ps-tree-1.2.0.tgz#5e7425b89508736cdd4f2224d028f7bb3f722ebd" - integrity sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA== - dependencies: - event-stream "=3.3.4" - -psl@^1.1.28, psl@^1.1.33: +psl@^1.1.28: version "1.8.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== +psl@^1.1.33: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + punycode@^1.3.2: version "1.4.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" @@ -13337,6 +8674,11 @@ punycode@^2.1.0, punycode@^2.1.1: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +punycode@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + pvtsutils@^1.3.2, pvtsutils@^1.3.5: version "1.3.5" resolved "https://registry.yarnpkg.com/pvtsutils/-/pvtsutils-1.3.5.tgz#b8705b437b7b134cd7fd858f025a23456f1ce910" @@ -13344,22 +8686,10 @@ pvtsutils@^1.3.2, pvtsutils@^1.3.5: dependencies: tslib "^2.6.1" -pvutils@^1.1.3: - 
version "1.1.3" - resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" - integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== - -q@^1.1.2: - version "1.5.1" - resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" - integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= - -qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== - dependencies: - side-channel "^1.0.4" +pvutils@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" + integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== qs@~6.5.2: version "6.5.3" @@ -13386,7 +8716,7 @@ query-string@^6.13.8: split-on-first "^1.0.0" strict-uri-encode "^2.0.0" -querystringify@^2.2.0: +querystringify@^2.1.1, querystringify@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== @@ -13396,40 +8726,6 @@ queue-microtask@^1.2.2: resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== -raf@^3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/raf/-/raf-3.4.1.tgz#0742e99a4a6552f445d73e3ee0328af0ff1ede39" - integrity sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA== - dependencies: - performance-now "^2.1.0" - -ramda@0.26.1: - version "0.26.1" - resolved 
"https://registry.npmjs.org/ramda/-/ramda-0.26.1.tgz#8d41351eb8111c55353617fc3bbffad8e4d35d06" - integrity sha512-hLWjpy7EnsDBb0p+Z3B7rPi3GDeRG5ZtiI33kJhTt+ORCd38AbAIjB/9zRIUoeTbE/AVX5ZkU7m6bznsvrf8eQ== - -randombytes@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" - integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== - dependencies: - safe-buffer "^5.1.0" - -range-parser@^1.2.1, range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" - rc-align@^4.0.0: version "4.0.9" resolved "https://registry.yarnpkg.com/rc-align/-/rc-align-4.0.9.tgz#46d8801c4a139ff6a65ad1674e8efceac98f85f2" @@ -13802,18 +9098,6 @@ rc-virtual-list@^3.2.0, rc-virtual-list@^3.4.8: rc-resize-observer "^1.0.0" rc-util "^5.15.0" -react-app-polyfill@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz#95221e0a9bd259e5ca6b177c7bb1cb6768f68fd7" - integrity sha512-sZ41cxiU5llIB003yxxQBYrARBqe0repqPTTYBTmMqTz9szeBbE37BehCE891NZsmdZqqP+xWKdT3eo3vOzN8w== - dependencies: - core-js "^3.19.2" - object-assign "^4.1.1" - promise "^8.1.0" - raf "^3.4.1" - regenerator-runtime "^0.13.9" - whatwg-fetch "^3.6.2" - react-clientside-effect@^1.2.5: version "1.2.6" resolved 
"https://registry.yarnpkg.com/react-clientside-effect/-/react-clientside-effect-1.2.6.tgz#29f9b14e944a376b03fb650eed2a754dd128ea3a" @@ -13834,36 +9118,6 @@ react-color@^2.19.3: reactcss "^1.2.0" tinycolor2 "^1.4.1" -react-dev-utils@^12.0.1: - version "12.0.1" - resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73" - integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ== - dependencies: - "@babel/code-frame" "^7.16.0" - address "^1.1.2" - browserslist "^4.18.1" - chalk "^4.1.2" - cross-spawn "^7.0.3" - detect-port-alt "^1.1.6" - escape-string-regexp "^4.0.0" - filesize "^8.0.6" - find-up "^5.0.0" - fork-ts-checker-webpack-plugin "^6.5.0" - global-modules "^2.0.0" - globby "^11.0.4" - gzip-size "^6.0.0" - immer "^9.0.7" - is-root "^2.1.0" - loader-utils "^3.2.0" - open "^8.4.0" - pkg-up "^3.1.0" - prompts "^2.4.2" - react-error-overlay "^6.0.11" - recursive-readdir "^2.2.2" - shell-quote "^1.7.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - react-dom@^17.0.0: version "17.0.2" resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23" @@ -13873,11 +9127,6 @@ react-dom@^17.0.0: object-assign "^4.1.1" scheduler "^0.20.2" -react-error-overlay@^6.0.11: - version "6.0.11" - resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb" - integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg== - react-fast-compare@^3.2.0: version "3.2.1" resolved "https://registry.yarnpkg.com/react-fast-compare/-/react-fast-compare-3.2.1.tgz#53933d9e14f364281d6cba24bfed7a4afb808b5f" @@ -13921,6 +9170,11 @@ react-icons@4.3.1: resolved "https://registry.yarnpkg.com/react-icons/-/react-icons-4.3.1.tgz#2fa92aebbbc71f43d2db2ed1aed07361124e91ca" integrity 
sha512-cB10MXLTs3gVuXimblAdI71jrJx8njrJZmNMEMC+sQu5B/BIOmlsAjskdqpn81y8UBVEGuHODd7/ci5DvoSzTQ== +react-intersection-observer@^9.5.3: + version "9.5.3" + resolved "https://registry.yarnpkg.com/react-intersection-observer/-/react-intersection-observer-9.5.3.tgz#f47a31ed3a0359cbbfdb91a53d7470ac2ab7b3c7" + integrity sha512-NJzagSdUPS5rPhaLsHXYeJbsvdpbJwL6yCHtMk91hc0ufQ2BnXis+0QQ9NBh6n9n+Q3OyjR6OQLShYbaNBkThQ== + react-is@^16.12.0, react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1: version "16.13.1" resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" @@ -13960,95 +9214,39 @@ react-markdown@6.0.2: unist-util-visit "^2.0.0" vfile "^4.0.0" -react-refresh@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046" - integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A== +react-refresh@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.14.0.tgz#4e02825378a5f227079554d4284889354e5f553e" + integrity sha512-wViHqhAd8OHeLS/IRMJjTSDHF3U9eWi62F/MledQGPdJGDhodXJ9PBLNGr6WWL7qlH12Mt3TyTpbS+hGXMjCzQ== -react-router-dom@^5.1.6: - version "5.2.0" - resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.2.0.tgz#9e65a4d0c45e13289e66c7b17c7e175d0ea15662" - integrity sha512-gxAmfylo2QUjcwxI63RhQ5G85Qqt4voZpUXSEqCwykV0baaOTQDR1f0PmY8AELqIyVc0NEZUj0Gov5lNGcXgsA== +react-router-dom@^5.3: + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.4.tgz#2ed62ffd88cae6db134445f4a0c0ae8b91d2e5e6" + integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== dependencies: - "@babel/runtime" "^7.1.2" + "@babel/runtime" "^7.12.13" history "^4.9.0" loose-envify "^1.3.1" prop-types "^15.6.2" - react-router "5.2.0" + 
react-router "5.3.4" tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@5.2.0, react-router@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.2.0.tgz#424e75641ca8747fbf76e5ecca69781aa37ea293" - integrity sha512-smz1DUuFHRKdcJC0jobGo8cVbhO3x50tCL4icacOlcwDOEQPq4TMqwx3sY1TP+DvtTgz4nm3thuo7A+BK2U0Dw== +react-router@5.3.4, react-router@^5.3: + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.4.tgz#8ca252d70fcc37841e31473c7a151cf777887bb5" + integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== dependencies: - "@babel/runtime" "^7.1.2" + "@babel/runtime" "^7.12.13" history "^4.9.0" hoist-non-react-statics "^3.1.0" loose-envify "^1.3.1" - mini-create-react-context "^0.4.0" path-to-regexp "^1.7.0" prop-types "^15.6.2" react-is "^16.6.0" tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-scripts@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003" - integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ== - dependencies: - "@babel/core" "^7.16.0" - "@pmmmwh/react-refresh-webpack-plugin" "^0.5.3" - "@svgr/webpack" "^5.5.0" - babel-jest "^27.4.2" - babel-loader "^8.2.3" - babel-plugin-named-asset-import "^0.3.8" - babel-preset-react-app "^10.0.1" - bfj "^7.0.2" - browserslist "^4.18.1" - camelcase "^6.2.1" - case-sensitive-paths-webpack-plugin "^2.4.0" - css-loader "^6.5.1" - css-minimizer-webpack-plugin "^3.2.0" - dotenv "^10.0.0" - dotenv-expand "^5.1.0" - eslint "^8.3.0" - eslint-config-react-app "^7.0.1" - eslint-webpack-plugin "^3.1.1" - file-loader "^6.2.0" - fs-extra "^10.0.0" - html-webpack-plugin "^5.5.0" - identity-obj-proxy "^3.0.0" - jest "^27.4.3" - jest-resolve "^27.4.2" - jest-watch-typeahead "^1.0.0" - mini-css-extract-plugin "^2.4.5" - postcss "^8.4.4" - 
postcss-flexbugs-fixes "^5.0.2" - postcss-loader "^6.2.1" - postcss-normalize "^10.0.1" - postcss-preset-env "^7.0.1" - prompts "^2.4.2" - react-app-polyfill "^3.0.0" - react-dev-utils "^12.0.1" - react-refresh "^0.11.0" - resolve "^1.20.0" - resolve-url-loader "^4.0.0" - sass-loader "^12.3.0" - semver "^7.3.5" - source-map-loader "^3.0.0" - style-loader "^3.3.1" - tailwindcss "^3.0.2" - terser-webpack-plugin "^5.2.5" - webpack "^5.64.4" - webpack-dev-server "^4.6.0" - webpack-manifest-plugin "^4.0.2" - workbox-webpack-plugin "^6.4.1" - optionalDependencies: - fsevents "^2.3.2" - react-syntax-highlighter@^15.4.4: version "15.4.4" resolved "https://registry.yarnpkg.com/react-syntax-highlighter/-/react-syntax-highlighter-15.4.4.tgz#dc9043f19e7bd063ff3ea78986d22a6eaa943b2a" @@ -14139,35 +9337,6 @@ reactour@1.18.7: scroll-smooth "1.1.1" scrollparent "2.0.1" -read-cache@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" - integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== - dependencies: - pify "^2.3.0" - -readable-stream@^2.0.1: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@^3.0.6: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" - integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== - dependencies: - inherits "^2.0.3" - string_decoder "^1.1.1" - util-deprecate "^1.0.1" - 
readable-stream@^3.4.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" @@ -14184,13 +9353,6 @@ readdirp@~3.6.0: dependencies: picomatch "^2.2.1" -recursive-readdir@^2.2.2: - version "2.2.3" - resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.3.tgz#e726f328c0d69153bcabd5c322d3195252379372" - integrity sha512-8HrF5ZsXk5FAH9dgsx3BlUer73nIhuj+9OrQwEbLTPOBzGkL1lsFCR01am+v+0m2Cmbs1nP12hLDl5FA7EszKA== - dependencies: - minimatch "^3.0.5" - redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -14224,43 +9386,11 @@ refractor@3.3.1, refractor@^3.2.0, refractor@^3.3.1: parse-entities "^2.0.0" prismjs "~1.23.0" -regenerate-unicode-properties@^10.1.0: - version "10.1.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c" - integrity sha512-d1VudCLoIGitcU/hEg2QqvyGZQmdC0Lf8BqdOMXGFSvJP4bNV1+XqbPQeHHLD51Jh4QJJ225dlIFvY4Ly6MXmQ== - dependencies: - regenerate "^1.4.2" - -regenerate@^1.4.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" - integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== - -regenerator-runtime@^0.13.11, regenerator-runtime@^0.13.4, regenerator-runtime@^0.13.9: +regenerator-runtime@^0.13.11, regenerator-runtime@^0.13.4: version "0.13.11" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== -regenerator-transform@^0.15.1: - version "0.15.1" - resolved 
"https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.1.tgz#f6c4e99fc1b4591f780db2586328e4d9a9d8dc56" - integrity sha512-knzmNAcuyxV+gQCufkYcvOqX/qIIfHLv0u5x79kRxuGojfYVky1f15TzZEu2Avte8QGepvUNTnLskf8E6X6Vyg== - dependencies: - "@babel/runtime" "^7.8.4" - -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - -regex-parser@^2.2.11: - version "2.2.11" - resolved "https://registry.yarnpkg.com/regex-parser/-/regex-parser-2.2.11.tgz#3b37ec9049e19479806e878cabe7c1ca83ccfe58" - integrity sha512-jbD/FT0+9MBU2XAZluI7w2OBs1RBi6p9M83nkoZayQXXU9e8Robt69FcZc7wU4eJD/YFTjn1JdCk3rbMJajz8Q== - regexp.prototype.flags@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" @@ -14270,24 +9400,14 @@ regexp.prototype.flags@^1.4.3: define-properties "^1.1.3" functions-have-names "^1.2.2" -regexpu-core@^5.3.1: - version "5.3.2" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" - integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== - dependencies: - "@babel/regjsgen" "^0.8.0" - regenerate "^1.4.2" - regenerate-unicode-properties "^10.1.0" - regjsparser "^0.9.1" - unicode-match-property-ecmascript "^2.0.0" - unicode-match-property-value-ecmascript "^2.1.0" - -regjsparser@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" - integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== +regexp.prototype.flags@^1.5.0: + 
version "1.5.1" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e" + integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg== dependencies: - jsesc "~0.5.0" + call-bind "^1.0.2" + define-properties "^1.2.0" + set-function-name "^2.0.0" rehype-parse@^7.0.0: version "7.0.1" @@ -14320,11 +9440,6 @@ rehype@11.0.0: rehype-stringify "^8.0.0" unified "^9.0.0" -relateurl@^0.2.7: - version "0.2.7" - resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" - integrity sha1-VNvzd+UUQKypCkzSdGANP/LYiKk= - relay-runtime@12.0.0: version "12.0.0" resolved "https://registry.yarnpkg.com/relay-runtime/-/relay-runtime-12.0.0.tgz#1e039282bdb5e0c1b9a7dc7f6b9a09d4f4ff8237" @@ -14449,23 +9564,7 @@ remove-trailing-spaces@^1.0.6: resolved "https://registry.yarnpkg.com/remove-trailing-spaces/-/remove-trailing-spaces-1.0.8.tgz#4354d22f3236374702f58ee373168f6d6887ada7" integrity sha512-O3vsMYfWighyFbTd8hk8VaSj9UAGENxAtX+//ugIst2RMk5e03h6RoIS+0ylsFxY1gvmPuAY/PO4It+gPEeySA== -renderkid@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" - integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== - dependencies: - css-select "^4.1.3" - dom-converter "^0.2.0" - htmlparser2 "^6.1.0" - lodash "^4.17.21" - strip-ansi "^6.0.1" - -repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== - -repeat-string@^1.0.0, repeat-string@^1.6.1: +repeat-string@^1.0.0: version "1.6.1" resolved 
"https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= @@ -14501,11 +9600,6 @@ require-directory@^2.1.1: resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= -require-from-string@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" - integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== - require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" @@ -14514,21 +9608,14 @@ require-main-filename@^2.0.0: requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== resize-observer-polyfill@^1.5.1: version "1.5.1" resolved "https://registry.yarnpkg.com/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz#0e9020dd3d21024458d4ebd27e23e40269810464" integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@5.0.0, resolve-from@^5.0.0: +resolve-from@5.0.0: version "5.0.0" resolved 
"https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== @@ -14543,28 +9630,7 @@ resolve-pathname@^3.0.0: resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== -resolve-url-loader@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-url-loader/-/resolve-url-loader-4.0.0.tgz#d50d4ddc746bb10468443167acf800dcd6c3ad57" - integrity sha512-05VEMczVREcbtT7Bz+C+96eUO5HDNvdthIiMB34t7FcF8ehcu4wC0sSgPUubs3XW2Q3CNLJk/BJrCU9wVRymiA== - dependencies: - adjust-sourcemap-loader "^4.0.0" - convert-source-map "^1.7.0" - loader-utils "^2.0.0" - postcss "^7.0.35" - source-map "0.6.1" - -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - -resolve.exports@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.1.tgz#05cfd5b3edf641571fd46fa608b610dda9ead999" - integrity sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== - -resolve@^1.1.7, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.22.1, resolve@^1.22.2: +resolve@^1.12.0, resolve@^1.19.0, resolve@^1.22.1: version "1.22.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== @@ -14590,16 +9656,6 @@ restore-cursor@^3.1.0: onetime "^5.1.0" signal-exit "^3.0.2" -ret@~0.1.10: - version "0.1.15" - resolved 
"https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - -retry@^0.13.1: - version "0.13.1" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== - reusify@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" @@ -14610,7 +9666,7 @@ rfdc@^1.3.0: resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== -rimraf@^3.0.0, rimraf@^3.0.2: +rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== @@ -14624,20 +9680,10 @@ rimraf@~2.6.2: dependencies: glob "^7.1.3" -rollup-plugin-terser@^7.0.0: - version "7.0.2" - resolved "https://registry.yarnpkg.com/rollup-plugin-terser/-/rollup-plugin-terser-7.0.2.tgz#e8fbba4869981b2dc35ae7e8a502d5c6c04d324d" - integrity sha512-w3iIaU4OxcF52UUXiZNsNeuXIMDvFrr+ZXK6bFZ0Q60qyVfq4uLptoS4bbq3paG3x216eQllFZX7zt6TIImguQ== - dependencies: - "@babel/code-frame" "^7.10.4" - jest-worker "^26.2.1" - serialize-javascript "^4.0.0" - terser "^5.0.0" - -rollup@^2.43.1: - version "2.79.1" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" - integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== +rollup@^3.27.1: + version "3.29.4" + resolved 
"https://registry.yarnpkg.com/rollup/-/rollup-3.29.4.tgz#4d70c0f9834146df8705bfb69a9a19c9e1109981" + integrity sha512-oWzmBZwvYrU0iJHtDmhsm662rC15FRXmcjCk1xD771dFDx5jJ02ufAQQTn0etB2emNk4J9EZg/yWKpsn9BWGRw== optionalDependencies: fsevents "~2.3.2" @@ -14662,10 +9708,10 @@ round@^2.0.1: precision "~1.0.0" round-precision "~1.0.0" -route-recognizer@^0.3.3: - version "0.3.4" - resolved "https://registry.npmjs.org/route-recognizer/-/route-recognizer-0.3.4.tgz#39ab1ffbce1c59e6d2bdca416f0932611e4f3ca3" - integrity sha512-2+MhsfPhvauN1O8KaXpXAOfR/fwe8dnUXVM+xw7yt40lJRfPVQxV6yryZm0cgRvAj5fMF/mdRZbL2ptwbs5i2g== +rrweb-cssom@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz#ed298055b97cbddcdeb278f904857629dec5e0e1" + integrity sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw== rtl-css-js@^1.14.0: version "1.16.0" @@ -14686,7 +9732,7 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^7.5.5, rxjs@^7.8.1: +rxjs@^7.5.5: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -14698,12 +9744,7 @@ safari-14-idb-fix@^1.0.6: resolved "https://registry.yarnpkg.com/safari-14-idb-fix/-/safari-14-idb-fix-1.0.6.tgz#cbaabc33a4500c44b5c432d6c525b0ed9b68bb65" integrity sha512-oTEQOdMwRX+uCtWCKT1nx2gAeSdpr8elg/2gcaKUH00SJU2xWESfkx11nmXwTRHy7xfQoj1o4TTQvdmuBosTnA== -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: +safe-buffer@^5.0.1, 
safe-buffer@^5.1.2, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -14717,40 +9758,20 @@ safe-regex-test@^1.0.0: get-intrinsic "^1.1.3" is-regex "^1.1.4" -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== -sanitize.css@*: - version "13.0.0" - resolved "https://registry.yarnpkg.com/sanitize.css/-/sanitize.css-13.0.0.tgz#2675553974b27964c75562ade3bd85d79879f173" - integrity sha512-ZRwKbh/eQ6w9vmTjkuG0Ioi3HBwPFce0O+v//ve+aOq1oeCy7jMV2qzzAlpsNuqpqCBjjriM1lbtZbF/Q8jVyA== - -sass-loader@^12.3.0: - version "12.6.0" - resolved "https://registry.yarnpkg.com/sass-loader/-/sass-loader-12.6.0.tgz#5148362c8e2cdd4b950f3c63ac5d16dbfed37bcb" - integrity sha512-oLTaH0YCtX4cfnJZxKSLAyglED0naiYfNG1iXfU5w1LNZ+ukoA5DtyDIN5zmKVZwYNJP4KRc5Y3hkWga+7tYfA== - dependencies: - klona "^2.0.4" - neo-async "^2.6.2" - -sax@^1.2.4, sax@~1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== +sax@^1.2.4: + version "1.3.0" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0" + integrity 
sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== -saxes@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/saxes/-/saxes-5.0.1.tgz#eebab953fa3b7608dbe94e5dadb15c888fa6696d" - integrity sha512-5LBh1Tls8c9xgGjw3QrMwETmTMVk0oFgvrFSvWx62llR2hcEInrKNZ2GZCCuuy2lvWrdl5jhbpeqc5hRYKFOcw== +saxes@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-6.0.0.tgz#fe5b4a4768df4f14a201b1ba6a65c1f3d9988cc5" + integrity sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA== dependencies: xmlchars "^2.2.0" @@ -14762,43 +9783,6 @@ scheduler@^0.20.2: loose-envify "^1.1.0" object-assign "^4.1.1" -schema-utils@2.7.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - -schema-utils@^2.6.5: - version "2.7.1" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== - dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" - -schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-3.3.0.tgz#f50a88877c3c01652a15b622ae9e9795df7a60fe" - integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== - dependencies: - "@types/json-schema" "^7.0.8" - ajv "^6.12.5" - ajv-keywords "^3.5.2" - -schema-utils@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" - 
integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== - dependencies: - "@types/json-schema" "^7.0.9" - ajv "^8.9.0" - ajv-formats "^2.1.1" - ajv-keywords "^5.1.0" - screenfull@^5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/screenfull/-/screenfull-5.2.0.tgz#6533d524d30621fc1283b9692146f3f13a93d1ba" @@ -14826,24 +9810,12 @@ scuid@^1.1.0: resolved "https://registry.yarnpkg.com/scuid/-/scuid-1.1.0.tgz#d3f9f920956e737a60f72d0e4ad280bf324d5dab" integrity sha512-MuCAyrGZcTLfQoH2XoBlQ8C6bzwN88XT/0slOGz0pn8+gIP85BOAfYa44ZXQUTOwRwPU0QvgU+V+OSajl/59Xg== -select-hose@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" - integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= - -selfsigned@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61" - integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ== - dependencies: - node-forge "^1" - semver@^5.6.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + version "5.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: +semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== @@ -14853,31 +9825,19 @@ semver@^6.3.1: resolved 
"https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8: +semver@^7.3.7: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== - dependencies: - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "2.0.0" - mime "1.6.0" - ms "2.1.3" - on-finished "2.4.1" - range-parser "~1.2.1" - statuses "2.0.1" +semver@^7.5.4: + version "7.5.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" sentence-case@^3.0.4: version "3.0.4" @@ -14888,85 +9848,40 @@ sentence-case@^3.0.4: tslib "^2.0.3" upper-case-first "^2.0.2" -serialize-javascript@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-4.0.0.tgz#b525e1238489a5ecfc42afacc3fe99e666f4b1aa" - integrity sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw== - dependencies: - randombytes "^2.1.0" - -serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: - version "6.0.1" - resolved 
"https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.1.tgz#b206efb27c3da0b0ab6b52f48d170b7996458e5c" - integrity sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w== - dependencies: - randombytes "^2.1.0" - -serve-index@^1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/serve-index/-/serve-index-1.9.1.tgz#d3768d69b1e7d82e5ce050fff5b453bea12a9239" - integrity sha1-03aNabHn2C5c4FD/9bRTvqEqkjk= - dependencies: - accepts "~1.3.4" - batch "0.6.1" - debug "2.6.9" - escape-html "~1.0.3" - http-errors "~1.6.2" - mime-types "~2.1.17" - parseurl "~1.3.2" - -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.18.0" - set-blocking@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= +set-function-length@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" + integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== + dependencies: + define-data-property "^1.1.1" + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + +set-function-name@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a" + integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== + dependencies: + define-data-property "^1.0.1" + functions-have-names "^1.2.3" + 
has-property-descriptors "^1.0.0" + set-harmonic-interval@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/set-harmonic-interval/-/set-harmonic-interval-1.0.1.tgz#e1773705539cdfb80ce1c3d99e7f298bb3995249" integrity sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g== -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - setimmediate@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= -setprototypeof@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - integrity sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ== - -setprototypeof@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" - integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== - -shallow-clone@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - kind-of "^6.0.2" - shallowequal@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.1.0.tgz#188d521de95b9087404fd4dcb68b13df0ae4e7f8" @@ -14998,7 +9913,12 @@ side-channel@^1.0.4: get-intrinsic 
"^1.0.2" object-inspect "^1.9.0" -signal-exit@^3.0.2, signal-exit@^3.0.3: +siginfo@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/siginfo/-/siginfo-2.0.0.tgz#32e76c70b79724e3bb567cb9d543eb858ccfaf30" + integrity sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g== + +signal-exit@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== @@ -15008,33 +9928,11 @@ signedsource@^1.0.0: resolved "https://registry.yarnpkg.com/signedsource/-/signedsource-1.0.0.tgz#1ddace4981798f93bd833973803d80d52e93ad6a" integrity sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww== -sinon@^11.1.1: - version "11.1.1" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-11.1.1.tgz#99a295a8b6f0fadbbb7e004076f3ae54fc6eab91" - integrity sha512-ZSSmlkSyhUWbkF01Z9tEbxZLF/5tRC9eojCdFh33gtQaP7ITQVaMWQHGuFM7Cuf/KEfihuh1tTl3/ABju3AQMg== - dependencies: - "@sinonjs/commons" "^1.8.3" - "@sinonjs/fake-timers" "^7.1.0" - "@sinonjs/samsam" "^6.0.2" - diff "^5.0.0" - nise "^5.1.0" - supports-color "^7.2.0" - -sisteransi@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - slash@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -slash@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity 
sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== - slice-ansi@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" @@ -15061,50 +9959,6 @@ snake-case@^3.0.4: dot-case "^3.0.4" tslib "^2.0.3" -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -sockjs@^0.3.24: - version "0.3.24" - resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.24.tgz#c9bc8995f33a111bea0395ec30aa3206bdb5ccce" - integrity sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ== - dependencies: - faye-websocket "^0.11.3" - uuid "^8.3.2" - websocket-driver "^0.7.4" - -source-list-map@^2.0.0, source-list-map@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" - integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== - source-map-explorer@^2.5.2: version "2.5.2" resolved "https://registry.yarnpkg.com/source-map-explorer/-/source-map-explorer-2.5.2.tgz#857cab5dd9d1d7175e9c5c2739dc9ccfb99f2dc5" @@ -15123,80 +9977,32 @@ source-map-explorer@^2.5.2: temp "^0.9.4" yargs "^16.2.0" -source-map-js@^1.0.1, source-map-js@^1.0.2: +source-map-js@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== -source-map-loader@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/source-map-loader/-/source-map-loader-3.0.2.tgz#af23192f9b344daa729f6772933194cc5fa54fee" - integrity sha512-BokxPoLjyl3iOrgkWaakaxqnelAJSS+0V+De0kKIq6lyWrXuiPgYTGp6z3iHmqljKAaLXwZa+ctD8GccRJeVvg== - dependencies: - abab "^2.0.5" - iconv-lite "^0.6.3" - source-map-js "^1.0.1" - -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-resolve@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.6.0.tgz#3d9df87e236b53f16d01e58150fc7711138e5ed2" - integrity sha512-KXBr9d/fO/bWo97NXsPIAW1bFSBOuCnjbNTBMO7N59hsv5i9yzRDfcYwwt0l04+VqnKC+EwzvJZIP/qkuMgR/w== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - -source-map-support@^0.5.6, source-map-support@~0.5.20: - version 
"0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-url@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" - integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== - source-map@0.5.6: version "0.5.6" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" integrity sha512-MjZkVp0NHr5+TPihLcadqnlVoGIoWo4IBHptutGh9wI3ttUYvCG26HkSuDi+K6lsZ25syXJXcctwgyVCt//xqA== -source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0, source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.5.6, source-map@^0.5.7: +source-map@^0.5.7: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= +source-map@^0.6.1, source-map@~0.6.0: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + source-map@^0.7.3: version "0.7.3" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== -source-map@^0.8.0-beta.0: - version 
"0.8.0-beta.0" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" - integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== - dependencies: - whatwg-url "^7.0.0" - -sourcemap-codec@^1.4.4, sourcemap-codec@^1.4.8: +sourcemap-codec@^1.4.8: version "1.4.8" resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== @@ -15206,48 +10012,11 @@ space-separated-tokens@^1.0.0, space-separated-tokens@^1.1.0: resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== -spdy-transport@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" - integrity sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw== - dependencies: - debug "^4.1.0" - detect-node "^2.0.4" - hpack.js "^2.1.6" - obuf "^1.1.2" - readable-stream "^3.0.6" - wbuf "^1.7.3" - -spdy@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/spdy/-/spdy-4.0.2.tgz#b74f466203a3eda452c02492b91fb9e84a27677b" - integrity sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA== - dependencies: - debug "^4.1.0" - handle-thing "^2.0.0" - http-deceiver "^1.2.7" - select-hose "^2.0.0" - spdy-transport "^3.0.0" - split-on-first@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f" integrity sha512-43ZssAJaMusuKWL8sKUBQXHWOpq8d6CfN/u1p4gUzfJkM05C8rxTmYrkIPTXapZpORA6LkkzcUulJ8FqA7Uudw== 
-split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - -split@0.3: - version "0.3.3" - resolved "https://registry.npmjs.org/split/-/split-0.3.3.tgz#cd0eea5e63a211dfff7eb0f091c4133e2d0dd28f" - integrity sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8= - dependencies: - through "2" - sponge-case@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/sponge-case/-/sponge-case-1.0.1.tgz#260833b86453883d974f84854cdb63aecc5aef4c" @@ -15275,11 +10044,6 @@ sshpk@^1.7.0: safer-buffer "^2.0.2" tweetnacl "~0.14.0" -stable@^0.1.8: - version "0.1.8" - resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - stack-generator@^2.0.5: version "2.0.10" resolved "https://registry.yarnpkg.com/stack-generator/-/stack-generator-2.0.10.tgz#8ae171e985ed62287d4f1ed55a1633b3fb53bb4d" @@ -15287,12 +10051,10 @@ stack-generator@^2.0.5: dependencies: stackframe "^1.3.4" -stack-utils@^2.0.3: - version "2.0.6" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" - integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== - dependencies: - escape-string-regexp "^2.0.0" +stackback@0.0.2: + version "0.0.2" + resolved "https://registry.yarnpkg.com/stackback/-/stackback-0.0.2.tgz#1ac8a0d9483848d1695e418b6d031a3c3ce68e3b" + integrity sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw== stackframe@^1.1.1, stackframe@^1.3.4: version "1.3.4" @@ -15316,49 +10078,22 @@ stacktrace-js@^2.0.2: stack-generator "^2.0.5" stacktrace-gps 
"^3.0.4" -start-server-and-test@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/start-server-and-test/-/start-server-and-test-2.0.3.tgz#15c53c85e23cba7698b498b8a2598cab95f3f802" - integrity sha512-QsVObjfjFZKJE6CS6bSKNwWZCKBG6975/jKRPPGFfFh+yOQglSeGXiNWjzgQNXdphcBI9nXbyso9tPfX4YAUhg== - dependencies: - arg "^5.0.2" - bluebird "3.7.2" - check-more-types "2.24.0" - debug "4.3.4" - execa "5.1.1" - lazy-ass "1.6.0" - ps-tree "1.2.0" - wait-on "7.2.0" - state-local@^1.0.6: version "1.0.7" resolved "https://registry.yarnpkg.com/state-local/-/state-local-1.0.7.tgz#da50211d07f05748d53009bee46307a37db386d5" integrity sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w== -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= +std-env@^3.3.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.4.3.tgz#326f11db518db751c83fd58574f449b7c3060910" + integrity sha512-f9aPhy8fYBuMN+sNfakZV18U39PbalgjXG3lLB9WkaYTxijru61wb57V9wxxNthXM5Sd88ETBWi29qLAsHO52Q== -stream-combiner@~0.0.4: - version "0.0.4" - resolved "https://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz#4d5e433c185261dde623ca3f44c586bcf5c4ad14" - integrity sha1-TV5DPBhSYd3mI8o/RMWGvPXErRQ= +stop-iteration-iterator@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz#6a60be0b4ee757d1ed5254858ec66b10c49285e4" + integrity sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ== dependencies: - duplexer "~0.1.1" + internal-slot "^1.0.4" streamsearch@^1.1.0: version "1.1.0" @@ -15380,27 +10115,6 @@ string-env-interpolation@^1.0.1: resolved "https://registry.yarnpkg.com/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz#ad4397ae4ac53fe6c91d1402ad6f6a52862c7152" integrity sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg== -string-length@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-length@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-5.0.1.tgz#3d647f497b6e8e8d41e422f7e0b23bc536c8381e" - integrity sha512-9Ep08KAMUn0OadnVaBuRdE2l615CQ508kr0XMadjClfYpdCyvrbFp6Taebo8yyxokQ4viUd/xPPUA4FGgUa0ow== - dependencies: - char-regex "^2.0.0" - strip-ansi "^7.0.1" - -string-natural-compare@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4" - integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw== - string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" @@ -15419,7 +10133,7 @@ string-width@^4.2.3: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string.prototype.matchall@^4.0.6, string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: 
+string.prototype.matchall@^4.0.7, string.prototype.matchall@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz#3bf85722021816dcd1bf38bb714915887ca79fd3" integrity sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg== @@ -15458,13 +10172,6 @@ string_decoder@^1.1.1: dependencies: safe-buffer "~5.2.0" -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - stringify-entities@^3.0.1: version "3.1.0" resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-3.1.0.tgz#b8d3feac256d9ffcc9fa1fefdcf3ca70576ee903" @@ -15474,15 +10181,6 @@ stringify-entities@^3.0.1: character-entities-legacy "^1.0.0" xtend "^4.0.0" -stringify-object@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/stringify-object/-/stringify-object-3.3.0.tgz#703065aefca19300d3ce88af4f5b3956d7556629" - integrity sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw== - dependencies: - get-own-enumerable-property-symbols "^3.0.0" - is-obj "^1.0.1" - is-regexp "^1.0.0" - strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -15490,32 +10188,10 @@ strip-ansi@^6.0.0, strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - strip-bom@^3.0.0: - version 
"3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-comments@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" - integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM= strip-indent@^3.0.0: version "3.0.0" @@ -15524,20 +10200,17 @@ strip-indent@^3.0.0: dependencies: min-indent "^1.0.0" -strip-json-comments@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= - strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -style-loader@^3.3.1: - version "3.3.3" - resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-3.3.3.tgz#bba8daac19930169c0c9c96706749a597ae3acff" 
- integrity sha512-53BiGLXAcll9maCYtZi2RCQZKa8NQQai5C4horqKyRmHj9H7QmcUyucrH+4KW/gBQbXM2AsB0axoEcFZPlfPcw== +strip-literal@^1.0.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/strip-literal/-/strip-literal-1.3.0.tgz#db3942c2ec1699e6836ad230090b84bb458e3a07" + integrity sha512-PugKzOsyXpArk0yWmUwqOZecSO0GH0bPoctLcqNDH9J04pVW3lflYE0ujElBGTloevcxF5MofAOZ7C5l2b+wLg== + dependencies: + acorn "^8.10.0" style-to-object@^0.3.0: version "0.3.0" @@ -15562,32 +10235,11 @@ styled-components@^5.2.1: shallowequal "^1.1.0" supports-color "^5.5.0" -stylehacks@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-5.1.1.tgz#7934a34eb59d7152149fa69d6e9e56f2fc34bcc9" - integrity sha512-sBpcd5Hx7G6seo7b1LkpttvTz7ikD0LlH5RmdcBNb6fFR0Fl7LQwHDFr300q4cwUqi+IYrFGmsIHieMBfnN/Bw== - dependencies: - browserslist "^4.21.4" - postcss-selector-parser "^6.0.4" - stylis@4.1.3, stylis@^4.0.6: version "4.1.3" resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.1.3.tgz#fd2fbe79f5fed17c55269e16ed8da14c84d069f7" integrity sha512-GP6WDNWf+o403jrEp9c5jibKavrtLW+/qYGhFxFrG8maXhwTBI7gLLhiBb0o7uFccWN+EOS9aMO6cGHWAO07OA== -sucrase@^3.32.0: - version "3.32.0" - resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.32.0.tgz#c4a95e0f1e18b6847127258a75cf360bc568d4a7" - integrity sha512-ydQOU34rpSyj2TGyz4D2p8rbktIOZ8QY9s+DGLvFU1i5pWJE8vkpruCjGCMHsdXwnD7JDcS+noSwM/a7zyNFDQ== - dependencies: - "@jridgewell/gen-mapping" "^0.3.2" - commander "^4.0.0" - glob "7.1.6" - lines-and-columns "^1.1.6" - mz "^2.7.0" - pirates "^4.0.1" - ts-interface-checker "^0.1.9" - supports-color@^5.3.0, supports-color@^5.5.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -15595,34 +10247,19 @@ supports-color@^5.3.0, supports-color@^5.5.0: dependencies: has-flag "^3.0.0" -supports-color@^7.0.0, supports-color@^7.1.0, supports-color@^7.2.0: +supports-color@^7.1.0: version "7.2.0" 
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" - integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -svg-parser@^2.0.2: +svg-parser@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== @@ -15639,38 +10276,6 @@ svgmoji@^3.2.0: "@svgmoji/openmoji" "^3.2.0" "@svgmoji/twemoji" "^3.2.0" -svgo@^1.2.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" - integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== - dependencies: - chalk "^2.4.1" - coa "^2.0.2" - css-select "^2.0.0" - css-select-base-adapter "^0.1.1" - css-tree "1.0.0-alpha.37" 
- csso "^4.0.2" - js-yaml "^3.13.1" - mkdirp "~0.5.1" - object.values "^1.1.0" - sax "~1.2.4" - stable "^0.1.8" - unquote "~1.1.1" - util.promisify "~1.0.0" - -svgo@^2.7.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== - dependencies: - "@trysound/sax" "0.2.0" - commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" - picocolors "^1.0.0" - stable "^0.1.8" - swap-case@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/swap-case/-/swap-case-2.0.2.tgz#671aedb3c9c137e2985ef51c51f9e98445bf70d9" @@ -15688,50 +10293,6 @@ symbol-tree@^3.2.4: resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== -tailwindcss@^3.0.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.3.2.tgz#2f9e35d715fdf0bbf674d90147a0684d7054a2d3" - integrity sha512-9jPkMiIBXvPc2KywkraqsUfbfj+dHDb+JPWtSJa9MLFdrPyazI7q6WX2sUrm7R9eVR7qqv3Pas7EvQFzxKnI6w== - dependencies: - "@alloc/quick-lru" "^5.2.0" - arg "^5.0.2" - chokidar "^3.5.3" - didyoumean "^1.2.2" - dlv "^1.1.3" - fast-glob "^3.2.12" - glob-parent "^6.0.2" - is-glob "^4.0.3" - jiti "^1.18.2" - lilconfig "^2.1.0" - micromatch "^4.0.5" - normalize-path "^3.0.0" - object-hash "^3.0.0" - picocolors "^1.0.0" - postcss "^8.4.23" - postcss-import "^15.1.0" - postcss-js "^4.0.1" - postcss-load-config "^4.0.1" - postcss-nested "^6.0.1" - postcss-selector-parser "^6.0.11" - postcss-value-parser "^4.2.0" - resolve "^1.22.2" - sucrase "^3.32.0" - -tapable@^1.0.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" - integrity 
sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== - -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -temp-dir@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e" - integrity sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg== - temp@^0.9.4: version "0.9.4" resolved "https://registry.yarnpkg.com/temp/-/temp-0.9.4.tgz#cd20a8580cb63635d0e4e9d4bd989d44286e7620" @@ -15740,93 +10301,21 @@ temp@^0.9.4: mkdirp "^0.5.1" rimraf "~2.6.2" -tempy@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tempy/-/tempy-0.6.0.tgz#65e2c35abc06f1124a97f387b08303442bde59f3" - integrity sha512-G13vtMYPT/J8A4X2SjdtBTphZlrp1gKv6hZiOjw14RCWg6GbHuQBGtjlx75xLbYV/wEc0D7G5K4rxKP/cXk8Bw== - dependencies: - is-stream "^2.0.0" - temp-dir "^2.0.0" - type-fest "^0.16.0" - unique-string "^2.0.0" - -terminal-link@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -terser-webpack-plugin@^5.2.5, terser-webpack-plugin@^5.3.7: - version "5.3.9" - resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" - integrity sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== - dependencies: - "@jridgewell/trace-mapping" "^0.3.17" - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript 
"^6.0.1" - terser "^5.16.8" - -terser@^5.0.0, terser@^5.10.0, terser@^5.16.8: - version "5.18.1" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.18.1.tgz#6d8642508ae9fb7b48768e48f16d675c89a78460" - integrity sha512-j1n0Ao919h/Ai5r43VAnfV/7azUYW43GPxK7qSATzrsERfW7+y2QW9Cp9ufnRF5CQUWbnLSo7UJokSWCqg4tsQ== - dependencies: - "@jridgewell/source-map" "^0.3.3" - acorn "^8.8.2" - commander "^2.20.0" - source-map-support "~0.5.20" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= -thenify-all@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" - integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== - dependencies: - thenify ">= 3.1.0 < 4" - -"thenify@>= 3.1.0 < 4": - version "3.3.1" - resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" - integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== - dependencies: - any-promise "^1.0.0" - -throat@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.2.tgz#51a3fbb5e11ae72e2cf74861ed5c8020f89f29fe" - integrity sha512-WKexMoJj3vEuK0yFEapj8y64V0A6xcuPuK9Gt1d0R+dzCSJc0lHqQytAbSB4cDAK0dWh4T0E2ETkoLE2WZ41OQ== - throttle-debounce@^3.0.1: version "3.0.1" resolved 
"https://registry.yarnpkg.com/throttle-debounce/-/throttle-debounce-3.0.1.tgz#32f94d84dfa894f786c9a1f290e7a645b6a19abb" integrity sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg== -through@2, through@^2.3.6, through@^2.3.8, through@~2.3, through@~2.3.1: +through@^2.3.6, through@^2.3.8: version "2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= -thunky@^1.0.2: - version "1.1.0" - resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" - integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== - tiny-invariant@^1.0.2: version "1.1.0" resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875" @@ -15837,11 +10326,26 @@ tiny-warning@^1.0.0, tiny-warning@^1.0.3: resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== +tinybench@^2.5.0: + version "2.5.1" + resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.5.1.tgz#3408f6552125e53a5a48adee31261686fd71587e" + integrity sha512-65NKvSuAVDP/n4CqH+a9w2kTlLReS9vhsAP06MWx+/89nMinJyB2icyl58RIcqCmIggpojIGeuJGhjU1aGMBSg== + tinycolor2@^1.4.1: version "1.4.2" resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.4.2.tgz#3f6a4d1071ad07676d7fa472e1fac40a719d8803" integrity sha512-vJhccZPs965sV/L2sU4oRQVAos0pQXwsvTLkWYdqJ+a8Q5kPFzJTuOFwy7UniPli44NKQGAglksjvOcpo95aZA== +tinypool@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/tinypool/-/tinypool-0.7.0.tgz#88053cc99b4a594382af23190c609d93fddf8021" + integrity sha512-zSYNUlYSMhJ6Zdou4cJwo/p7w5nmAH17GRfU/ui3ctvjXFErXXkruT4MWW6poDeXgCaIBlGLrfU6TbTXxyGMww== + 
+tinyspy@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tinyspy/-/tinyspy-2.2.0.tgz#9dc04b072746520b432f77ea2c2d17933de5d6ce" + integrity sha512-d2eda04AN/cPOR89F7Xv5bK/jrQEhmcLFe6HFldoeO9AJtps+fqEnh486vnT/8y4bw38pSyxDcTCAq+Ks2aJTg== + title-case@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/title-case/-/title-case-3.0.3.tgz#bc689b46f02e411f1d1e1d081f7c3deca0489982" @@ -15856,31 +10360,11 @@ tmp@^0.0.33: dependencies: os-tmpdir "~1.0.2" -tmpl@1.0.x: - version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" @@ -15888,34 +10372,20 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - 
dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - toggle-selection@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/toggle-selection/-/toggle-selection-1.0.6.tgz#6e45b1263f2017fa0acc7d89d78b15b8bf77da32" integrity sha1-bkWxJj8gF/oKzH2J14sVuL932jI= -toidentifier@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" - integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== - -tough-cookie@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.0.0.tgz#d822234eeca882f991f0f908824ad2622ddbece4" - integrity sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg== +tough-cookie@^4.1.2: + version "4.1.3" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" + integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== dependencies: psl "^1.1.33" punycode "^2.1.1" - universalify "^0.1.2" + universalify "^0.2.0" + url-parse "^1.5.3" tough-cookie@~2.5.0: version "2.5.0" @@ -15925,19 +10395,12 @@ tough-cookie@~2.5.0: psl "^1.1.28" punycode "^2.1.1" -tr46@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" - integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== - dependencies: - punycode "^2.1.0" - -tr46@^2.0.2: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" - integrity sha512-15Ih7phfcdP5YxqiB+iDtLoaTz4Nd35+IiAv0kQ5FNKHzXgdWqPoTIqEDDJmXceQt4JZk6lVPT8lnDlPpGDppw== +tr46@^4.1.1: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/tr46/-/tr46-4.1.1.tgz#281a758dcc82aeb4fe38c7dfe4d11a395aac8469" + integrity sha512-2lv/66T7e5yNyhAAC4NaKe5nVavzuGJQVVtRYLyQ2OI8tsJ61PMLlelehb0wi2Hx6+hT/OJUWZcw8MjlSRnxvw== dependencies: - punycode "^2.1.1" + punycode "^2.3.0" tr46@~0.0.3: version "0.0.3" @@ -15949,21 +10412,16 @@ trough@^1.0.0: resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.5.tgz#b8b639cefad7d0bb2abd37d433ff8293efa5f406" integrity sha512-rvuRbTarPXmMb79SmzEp8aqXNKcK+y0XaB298IXueQ8I2PsrATcPBCSPyK/dDNa2iWOhKlfNnOjdAOTBU/nkFA== -tryer@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8" - integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== +ts-api-utils@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" + integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== ts-easing@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== -ts-interface-checker@^0.1.9: - version "0.1.13" - resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" - integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== - ts-invariant@^0.7.0: version "0.7.3" resolved "https://registry.yarnpkg.com/ts-invariant/-/ts-invariant-0.7.3.tgz#13aae22a4a165393aaf5cecdee45ef4128d358b8" @@ -15976,25 +10434,6 @@ ts-log@^2.2.3: resolved "https://registry.yarnpkg.com/ts-log/-/ts-log-2.2.3.tgz#4da5640fe25a9fb52642cd32391c886721318efb" integrity 
sha512-XvB+OdKSJ708Dmf9ore4Uf/q62AYDTzFcAdxc8KNML1mmAWywRFVt/dn1KYJH8Agt5UJNujfM3znU5PxgAzA2w== -ts-node@^10.7.0: - version "10.9.1" - resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" - integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== - dependencies: - "@cspotcode/source-map-support" "^0.8.0" - "@tsconfig/node10" "^1.0.7" - "@tsconfig/node12" "^1.0.7" - "@tsconfig/node14" "^1.0.0" - "@tsconfig/node16" "^1.0.2" - acorn "^8.4.1" - acorn-walk "^8.1.1" - arg "^4.1.0" - create-require "^1.1.0" - diff "^4.0.1" - make-error "^1.1.1" - v8-compile-cache-lib "^3.0.1" - yn "3.1.1" - tsconfig-paths@^3.14.1: version "3.14.2" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" @@ -16078,23 +10517,11 @@ type-check@^0.4.0, type-check@~0.4.0: dependencies: prelude-ls "^1.2.1" -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= - dependencies: - prelude-ls "~1.1.2" - -type-detect@4.0.8, type-detect@^4.0.8: +type-detect@^4.0.0, type-detect@^4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== -type-fest@^0.16.0: - version "0.16.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.16.0.tgz#3240b891a78b0deae910dbeb86553e552a148860" - integrity sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg== - type-fest@^0.20.2: version "0.20.2" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" @@ -16115,21 +10542,6 @@ type-fest@^2.0.0: resolved 
"https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== -type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray-to-buffer@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" - integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== - dependencies: - is-typedarray "^1.0.0" - typescript@^4.8.4: version "4.9.4" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.4.tgz#a2a3d2756c079abda241d75f149df9d561091e78" @@ -16140,6 +10552,11 @@ ua-parser-js@^0.7.30: resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532" integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw== +ufo@^1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.3.1.tgz#e085842f4627c41d4c1b60ebea1f75cdab4ce86b" + integrity sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw== + unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" @@ -16155,28 +10572,10 @@ unc-path-regex@^0.1.2: resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" integrity sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg== 
-unicode-canonical-property-names-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" - integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== - -unicode-match-property-ecmascript@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" - integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== - dependencies: - unicode-canonical-property-names-ecmascript "^2.0.0" - unicode-property-aliases-ecmascript "^2.0.0" - -unicode-match-property-value-ecmascript@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" - integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== - -unicode-property-aliases-ecmascript@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" - integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== +undici-types@~5.26.4: + version "5.26.5" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" + integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== unified@^9.0.0: version "9.2.1" @@ -16190,23 +10589,6 @@ unified@^9.0.0: trough "^1.0.0" vfile "^4.0.0" -union-value@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - -unique-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - unist-builder@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/unist-builder/-/unist-builder-2.0.3.tgz#77648711b5d86af0942f334397a33c5e91516436" @@ -16260,10 +10642,10 @@ universal-analytics@^0.4.20: request "^2.88.2" uuid "^3.0.0" -universalify@^0.1.0, universalify@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" - integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== +universalify@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0" + integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg== universalify@^2.0.0: version "2.0.0" @@ -16277,29 +10659,6 @@ unixify@^1.0.0: dependencies: normalize-path "^2.1.1" -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== - -unquote@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" - integrity 
sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= - -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - update-browserslist-db@^1.0.11: version "1.0.11" resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" @@ -16337,10 +10696,13 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= +url-parse@^1.5.3: + version "1.5.10" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" urlpattern-polyfill@^8.0.0: version "8.0.2" @@ -16379,36 +10741,11 @@ use-sidecar@^1.0.5: detect-node-es "^1.1.0" tslib "^2.0.0" -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: +util-deprecate@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= -util.promisify@~1.0.0: - 
version "1.0.1" - resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" - integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.2" - has-symbols "^1.0.1" - object.getownpropertydescriptors "^2.1.0" - -utila@~0.4: - version "0.4.0" - resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c" - integrity sha1-ihagXURWV6Oupe7MWxKk+lN5dyw= - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== - uuid@^3.0.0, uuid@^3.3.2: version "3.4.0" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" @@ -16419,20 +10756,6 @@ uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -v8-compile-cache-lib@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" - integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== - -v8-to-istanbul@^8.1.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz#77b752fd3975e31bbcef938f85e9bd1c7a8d60ed" - integrity sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - value-equal@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" @@ -16443,11 +10766,6 @@ value-or-promise@^1.0.11, value-or-promise@^1.0.12: resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.12.tgz#0e5abfeec70148c78460a849f6b003ea7986f15c" integrity sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q== -vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== - verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" @@ -16485,57 +10803,100 @@ virtualizedtableforantd4@^1.2.1: resolved "https://registry.yarnpkg.com/virtualizedtableforantd4/-/virtualizedtableforantd4-1.2.1.tgz#331e8d2f203cdee6667cb5b9cbd7af823f99f65a" integrity sha512-Hl21jF3WZESanz/iKIjvbjeZ5gGX2t85h2cWQFJAagOQnN7t/pvC4kXhfYNseJtaiU6QHOm5RgX3ud+oXeST1Q== -w3c-hr-time@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== +vite-node@0.34.6: + version "0.34.6" + resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-0.34.6.tgz#34d19795de1498562bf21541a58edcd106328a17" + integrity sha512-nlBMJ9x6n7/Amaz6F3zJ97EBwR2FkzhBRxF5e+jE6LA3yi6Wtc2lyTij1OnDMIr34v5g/tVQtsVAzhT0jc5ygA== dependencies: - browser-process-hrtime "^1.0.0" + cac "^6.7.14" + debug "^4.3.4" + mlly "^1.4.0" + pathe "^1.1.1" + picocolors "^1.0.0" + vite "^3.0.0 || ^4.0.0 || ^5.0.0-0" -w3c-keyname@^2.2.0, w3c-keyname@^2.2.4: - version "2.2.6" - resolved "https://registry.yarnpkg.com/w3c-keyname/-/w3c-keyname-2.2.6.tgz#8412046116bc16c5d73d4e612053ea10a189c85f" - 
integrity sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg== +vite-plugin-babel-macros@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/vite-plugin-babel-macros/-/vite-plugin-babel-macros-1.0.6.tgz#d05cee3c38c620ccb534e38f412fdd899a3365b5" + integrity sha512-7cCT8jtu5UjpE46pH7RyVltWw5FbhDAtQliZ6QGqRNR5RUZKdAsB0CDjuF+VBoDpnl0KuESPu22SoNqXRYYWyQ== + dependencies: + "@babel/core" "^7.17.7" + "@babel/plugin-syntax-jsx" "^7.16.7" + "@babel/plugin-syntax-typescript" "^7.16.7" + "@types/babel__core" "^7.1.18" + babel-plugin-macros "^3.1.0" -w3c-xmlserializer@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz#3e7104a05b75146cc60f564380b7f683acf1020a" - integrity sha512-4tzD0mF8iSiMiNs30BiLO3EpfGLZUT2MSX/G+o7ZywDzliWQ3OPtTZ0PTC3B3ca1UAf4cJMHB+2Bf56EriJuRA== +vite-plugin-static-copy@^0.17.0: + version "0.17.0" + resolved "https://registry.yarnpkg.com/vite-plugin-static-copy/-/vite-plugin-static-copy-0.17.0.tgz#e45527da186c4a3818d09635797b6fc7cc9e035f" + integrity sha512-2HpNbHfDt8SDy393AGXh9llHkc8FJMQkI8s3T5WsH3SWLMO+f5cFIyPErl4yGKU9Uh3Vaqsd4lHZYTf042fQ2A== dependencies: - xml-name-validator "^3.0.0" + chokidar "^3.5.3" + fast-glob "^3.2.11" + fs-extra "^11.1.0" + picocolors "^1.0.0" -wait-on@7.2.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-7.2.0.tgz#d76b20ed3fc1e2bebc051fae5c1ff93be7892928" - integrity sha512-wCQcHkRazgjG5XoAq9jbTMLpNIjoSlZslrJ2+N9MxDsGEv1HnFoVjOCexL0ESva7Y9cu350j+DWADdk54s4AFQ== +vite-plugin-svgr@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/vite-plugin-svgr/-/vite-plugin-svgr-4.1.0.tgz#f11072a873856039702bb66657379c53d3bb5d5a" + integrity sha512-v7Qic+FWmCChgQNGSI4V8X63OEYsdUoLt66iqIcHozq9bfK/Dwmr0V+LBy1NE8CE98Y8HouEBJ+pto4AMfN5xw== dependencies: - axios "^1.6.1" - joi "^17.11.0" - lodash "^4.17.21" - minimist "^1.2.8" - rxjs "^7.8.1" + "@rollup/pluginutils" "^5.0.4" + 
"@svgr/core" "^8.1.0" + "@svgr/plugin-jsx" "^8.1.0" -walker@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" - integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= +"vite@^3.0.0 || ^4.0.0 || ^5.0.0-0", "vite@^3.1.0 || ^4.0.0 || ^5.0.0-0", vite@^4.5.2: + version "4.5.2" + resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.2.tgz#d6ea8610e099851dad8c7371599969e0f8b97e82" + integrity sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w== dependencies: - makeerror "1.0.x" + esbuild "^0.18.10" + postcss "^8.4.27" + rollup "^3.27.1" + optionalDependencies: + fsevents "~2.3.2" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +vitest@^0.34.6: + version "0.34.6" + resolved "https://registry.yarnpkg.com/vitest/-/vitest-0.34.6.tgz#44880feeeef493c04b7f795ed268f24a543250d7" + integrity sha512-+5CALsOvbNKnS+ZHMXtuUC7nL8/7F1F2DnHGjSsszX8zCjWSSviphCb/NuS9Nzf4Q03KyyDRBAXhF/8lffME4Q== dependencies: - glob-to-regexp "^0.4.1" - graceful-fs "^4.1.2" + "@types/chai" "^4.3.5" + "@types/chai-subset" "^1.3.3" + "@types/node" "*" + "@vitest/expect" "0.34.6" + "@vitest/runner" "0.34.6" + "@vitest/snapshot" "0.34.6" + "@vitest/spy" "0.34.6" + "@vitest/utils" "0.34.6" + acorn "^8.9.0" + acorn-walk "^8.2.0" + cac "^6.7.14" + chai "^4.3.10" + debug "^4.3.4" + local-pkg "^0.4.3" + magic-string "^0.30.1" + pathe "^1.1.1" + picocolors "^1.0.0" + std-env "^3.3.3" + strip-literal "^1.0.1" + tinybench "^2.5.0" + tinypool "^0.7.0" + vite "^3.1.0 || ^4.0.0 || ^5.0.0-0" + vite-node "0.34.6" + why-is-node-running "^2.2.2" + +w3c-keyname@^2.2.0, w3c-keyname@^2.2.4: + version "2.2.6" + resolved 
"https://registry.yarnpkg.com/w3c-keyname/-/w3c-keyname-2.2.6.tgz#8412046116bc16c5d73d4e612053ea10a189c85f" + integrity sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg== -wbuf@^1.1.0, wbuf@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/wbuf/-/wbuf-1.7.3.tgz#c1d8d149316d3ea852848895cb6a0bfe887b87df" - integrity sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA== +w3c-xmlserializer@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz#aebdc84920d806222936e3cdce408e32488a3073" + integrity sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw== dependencies: - minimalistic-assert "^1.0.0" + xml-name-validator "^4.0.0" wcwidth@^1.0.1: version "1.0.1" @@ -16575,165 +10936,30 @@ webidl-conversions@^3.0.0: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== -webidl-conversions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== - -webidl-conversions@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-5.0.0.tgz#ae59c8a00b121543a2acc65c0434f57b0fc11aff" - integrity sha512-VlZwKPCkYKxQgeSbH5EyngOmRp7Ww7I9rQLERETtf5ofd9pGeswWiOtogpEO850jziPRarreGxn5QIiTqpb2wA== - -webidl-conversions@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514" - integrity 
sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w== - -webpack-dev-middleware@^5.3.1: - version "5.3.3" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f" - integrity sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA== - dependencies: - colorette "^2.0.10" - memfs "^3.4.3" - mime-types "^2.1.31" - range-parser "^1.2.1" - schema-utils "^4.0.0" - -webpack-dev-server@^4.6.0: - version "4.15.1" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz#8944b29c12760b3a45bdaa70799b17cb91b03df7" - integrity sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA== - dependencies: - "@types/bonjour" "^3.5.9" - "@types/connect-history-api-fallback" "^1.3.5" - "@types/express" "^4.17.13" - "@types/serve-index" "^1.9.1" - "@types/serve-static" "^1.13.10" - "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.5" - ansi-html-community "^0.0.8" - bonjour-service "^1.0.11" - chokidar "^3.5.3" - colorette "^2.0.10" - compression "^1.7.4" - connect-history-api-fallback "^2.0.0" - default-gateway "^6.0.3" - express "^4.17.3" - graceful-fs "^4.2.6" - html-entities "^2.3.2" - http-proxy-middleware "^2.0.3" - ipaddr.js "^2.0.1" - launch-editor "^2.6.0" - open "^8.0.9" - p-retry "^4.5.0" - rimraf "^3.0.2" - schema-utils "^4.0.0" - selfsigned "^2.1.1" - serve-index "^1.9.1" - sockjs "^0.3.24" - spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.13.0" - -webpack-manifest-plugin@^4.0.2: - version "4.1.1" - resolved "https://registry.yarnpkg.com/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz#10f8dbf4714ff93a215d5a45bcc416d80506f94f" - integrity sha512-YXUAwxtfKIJIKkhg03MKuiFAD72PlrqCiwdwO4VEXdRO5V0ORCNwaOwAZawPZalCbmH9kBDmXnNeQOw+BIEiow== - dependencies: - tapable "^2.0.0" - webpack-sources "^2.2.0" - -webpack-merge@^5.8.0: - version 
"5.9.0" - resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.9.0.tgz#dc160a1c4cf512ceca515cc231669e9ddb133826" - integrity sha512-6NbRQw4+Sy50vYNTw7EyOn41OZItPiXB8GNv3INSoe3PSFaHJEz3SHTrYVaRm2LilNGnFUzh0FAwqPEmU/CwDg== - dependencies: - clone-deep "^4.0.1" - wildcard "^2.0.0" - -webpack-sources@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" - integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== - dependencies: - source-list-map "^2.0.0" - source-map "~0.6.1" - -webpack-sources@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.1.tgz#570de0af163949fe272233c2cefe1b56f74511fd" - integrity sha512-y9EI9AO42JjEcrTJFOYmVywVZdKVUfOvDUPsJea5GIr1JOEGFVqwlY2K098fFoIjOkDzHn2AjRvM8dsBZu+gCA== - dependencies: - source-list-map "^2.0.1" - source-map "^0.6.1" - -webpack-sources@^3.2.3: - version "3.2.3" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.3.tgz#2d4daab8451fd4b240cc27055ff6a0c2ccea0cde" - integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== +webidl-conversions@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz#256b4e1882be7debbf01d05f0aa2039778ea080a" + integrity sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g== -webpack@^5.64.4: - version "5.88.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.0.tgz#a07aa2f8e7a64a8f1cec0c6c2e180e3cb34440c8" - integrity sha512-O3jDhG5e44qIBSi/P6KpcCcH7HD+nYIHVBhdWFxcLOcIGN8zGo5nqF3BjyNCxIh4p1vFdNnreZv2h2KkoAw3lw== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^1.0.0" - "@webassemblyjs/ast" "^1.11.5" - "@webassemblyjs/wasm-edit" "^1.11.5" - "@webassemblyjs/wasm-parser" "^1.11.5" 
- acorn "^8.7.1" - acorn-import-assertions "^1.9.0" - browserslist "^4.14.5" - chrome-trace-event "^1.0.2" - enhanced-resolve "^5.15.0" - es-module-lexer "^1.2.1" - eslint-scope "5.1.1" - events "^3.2.0" - glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" - json-parse-even-better-errors "^2.3.1" - loader-runner "^4.2.0" - mime-types "^2.1.27" - neo-async "^2.6.2" - schema-utils "^3.2.0" - tapable "^2.1.1" - terser-webpack-plugin "^5.3.7" - watchpack "^2.4.0" - webpack-sources "^3.2.3" - -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: - version "0.7.4" - resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.7.4.tgz#89ad5295bbf64b480abcba31e4953aca706f5760" - integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== +whatwg-encoding@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz#e7635f597fd87020858626805a2729fa7698ac53" + integrity sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg== dependencies: - http-parser-js ">=0.5.1" - safe-buffer ">=5.1.0" - websocket-extensions ">=0.1.1" + iconv-lite "0.6.3" -websocket-extensions@>=0.1.1: - version "0.1.4" - resolved "https://registry.yarnpkg.com/websocket-extensions/-/websocket-extensions-0.1.4.tgz#7f8473bc839dfd87608adb95d7eb075211578a42" - integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== +whatwg-mimetype@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz#5fa1a7623867ff1af6ca3dc72ad6b8a4208beba7" + integrity sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q== -whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== +whatwg-url@^12.0.0, whatwg-url@^12.0.1: + version "12.0.1" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-12.0.1.tgz#fd7bcc71192e7c3a2a97b9a8d6b094853ed8773c" + integrity sha512-Ed/LrqB8EPlGxjS+TrsXcpUond1mhccS3pchLhzSgPCnTimUCKj3IZE75pAs5m6heB2U2TMerKFUXheyHY+VDQ== dependencies: - iconv-lite "0.4.24" - -whatwg-fetch@^3.6.2: - version "3.6.2" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" - integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== - -whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + tr46 "^4.1.1" + webidl-conversions "^7.0.0" whatwg-url@^5.0.0: version "5.0.0" @@ -16743,24 +10969,6 @@ whatwg-url@^5.0.0: tr46 "~0.0.3" webidl-conversions "^3.0.0" -whatwg-url@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" - integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== - dependencies: - lodash.sortby "^4.7.0" - tr46 "^1.0.1" - webidl-conversions "^4.0.2" - -whatwg-url@^8.0.0, whatwg-url@^8.5.0: - version "8.5.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-8.5.0.tgz#7752b8464fc0903fec89aa9846fc9efe07351fd3" - integrity sha512-fy+R77xWv0AiqfLl4nuGUlQ3/6b5uNfQ4WAbGQVMYshCTCCPK9psC1nWh3XHuxGVCtlcDDQPQW1csmmIQo+fwg== - dependencies: - lodash "^4.7.0" - tr46 "^2.0.2" - webidl-conversions "^6.1.0" - which-boxed-primitive@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" @@ -16772,17 +10980,31 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" +which-collection@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" + integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== + dependencies: + is-map "^2.0.1" + is-set "^2.0.1" + is-weakmap "^2.0.1" + is-weakset "^2.0.1" + which-module@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= -which@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== +which-typed-array@^1.1.11, which-typed-array@^1.1.9: + version "1.1.13" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" + integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== dependencies: - isexe "^2.0.0" + available-typed-arrays "^1.0.5" + call-bind "^1.0.4" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" which@^2.0.1: version "2.0.2" @@ -16791,185 +11013,19 @@ which@^2.0.1: dependencies: isexe "^2.0.0" -wildcard@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67" - integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ== +why-is-node-running@^2.2.2: + version "2.2.2" + resolved 
"https://registry.yarnpkg.com/why-is-node-running/-/why-is-node-running-2.2.2.tgz#4185b2b4699117819e7154594271e7e344c9973e" + integrity sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA== + dependencies: + siginfo "^2.0.0" + stackback "0.0.2" -word-wrap@^1.2.3, word-wrap@~1.2.3: +word-wrap@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== -workbox-background-sync@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-background-sync/-/workbox-background-sync-6.6.1.tgz#08d603a33717ce663e718c30cc336f74909aff2f" - integrity sha512-trJd3ovpWCvzu4sW0E8rV3FUyIcC0W8G+AZ+VcqzzA890AsWZlUGOTSxIMmIHVusUw/FDq1HFWfy/kC/WTRqSg== - dependencies: - idb "^7.0.1" - workbox-core "6.6.1" - -workbox-broadcast-update@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-broadcast-update/-/workbox-broadcast-update-6.6.1.tgz#0fad9454cf8e4ace0c293e5617c64c75d8a8c61e" - integrity sha512-fBhffRdaANdeQ1V8s692R9l/gzvjjRtydBOvR6WCSB0BNE2BacA29Z4r9/RHd9KaXCPl6JTdI9q0bR25YKP8TQ== - dependencies: - workbox-core "6.6.1" - -workbox-build@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-build/-/workbox-build-6.6.1.tgz#6010e9ce550910156761448f2dbea8cfcf759cb0" - integrity sha512-INPgDx6aRycAugUixbKgiEQBWD0MPZqU5r0jyr24CehvNuLPSXp/wGOpdRJmts656lNiXwqV7dC2nzyrzWEDnw== - dependencies: - "@apideck/better-ajv-errors" "^0.3.1" - "@babel/core" "^7.11.1" - "@babel/preset-env" "^7.11.0" - "@babel/runtime" "^7.11.2" - "@rollup/plugin-babel" "^5.2.0" - "@rollup/plugin-node-resolve" "^11.2.1" - "@rollup/plugin-replace" "^2.4.1" - "@surma/rollup-plugin-off-main-thread" "^2.2.3" - ajv "^8.6.0" - common-tags "^1.8.0" - fast-json-stable-stringify "^2.1.0" - fs-extra "^9.0.1" - glob "^7.1.6" - lodash "^4.17.20" - 
pretty-bytes "^5.3.0" - rollup "^2.43.1" - rollup-plugin-terser "^7.0.0" - source-map "^0.8.0-beta.0" - stringify-object "^3.3.0" - strip-comments "^2.0.1" - tempy "^0.6.0" - upath "^1.2.0" - workbox-background-sync "6.6.1" - workbox-broadcast-update "6.6.1" - workbox-cacheable-response "6.6.1" - workbox-core "6.6.1" - workbox-expiration "6.6.1" - workbox-google-analytics "6.6.1" - workbox-navigation-preload "6.6.1" - workbox-precaching "6.6.1" - workbox-range-requests "6.6.1" - workbox-recipes "6.6.1" - workbox-routing "6.6.1" - workbox-strategies "6.6.1" - workbox-streams "6.6.1" - workbox-sw "6.6.1" - workbox-window "6.6.1" - -workbox-cacheable-response@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-cacheable-response/-/workbox-cacheable-response-6.6.1.tgz#284c2b86be3f4fd191970ace8c8e99797bcf58e9" - integrity sha512-85LY4veT2CnTCDxaVG7ft3NKaFbH6i4urZXgLiU4AiwvKqS2ChL6/eILiGRYXfZ6gAwDnh5RkuDbr/GMS4KSag== - dependencies: - workbox-core "6.6.1" - -workbox-core@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-core/-/workbox-core-6.6.1.tgz#7184776d4134c5ed2f086878c882728fc9084265" - integrity sha512-ZrGBXjjaJLqzVothoE12qTbVnOAjFrHDXpZe7coCb6q65qI/59rDLwuFMO4PcZ7jcbxY+0+NhUVztzR/CbjEFw== - -workbox-expiration@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-expiration/-/workbox-expiration-6.6.1.tgz#a841fa36676104426dbfb9da1ef6a630b4f93739" - integrity sha512-qFiNeeINndiOxaCrd2DeL1Xh1RFug3JonzjxUHc5WkvkD2u5abY3gZL1xSUNt3vZKsFFGGORItSjVTVnWAZO4A== - dependencies: - idb "^7.0.1" - workbox-core "6.6.1" - -workbox-google-analytics@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-google-analytics/-/workbox-google-analytics-6.6.1.tgz#a07a6655ab33d89d1b0b0a935ffa5dea88618c5d" - integrity sha512-1TjSvbFSLmkpqLcBsF7FuGqqeDsf+uAXO/pjiINQKg3b1GN0nBngnxLcXDYo1n/XxK4N7RaRrpRlkwjY/3ocuA== - dependencies: - workbox-background-sync "6.6.1" - workbox-core "6.6.1" - workbox-routing 
"6.6.1" - workbox-strategies "6.6.1" - -workbox-navigation-preload@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-navigation-preload/-/workbox-navigation-preload-6.6.1.tgz#61a34fe125558dd88cf09237f11bd966504ea059" - integrity sha512-DQCZowCecO+wRoIxJI2V6bXWK6/53ff+hEXLGlQL4Rp9ZaPDLrgV/32nxwWIP7QpWDkVEtllTAK5h6cnhxNxDA== - dependencies: - workbox-core "6.6.1" - -workbox-precaching@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-precaching/-/workbox-precaching-6.6.1.tgz#dedeeba10a2d163d990bf99f1c2066ac0d1a19e2" - integrity sha512-K4znSJ7IKxCnCYEdhNkMr7X1kNh8cz+mFgx9v5jFdz1MfI84pq8C2zG+oAoeE5kFrUf7YkT5x4uLWBNg0DVZ5A== - dependencies: - workbox-core "6.6.1" - workbox-routing "6.6.1" - workbox-strategies "6.6.1" - -workbox-range-requests@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-range-requests/-/workbox-range-requests-6.6.1.tgz#ddaf7e73af11d362fbb2f136a9063a4c7f507a39" - integrity sha512-4BDzk28govqzg2ZpX0IFkthdRmCKgAKreontYRC5YsAPB2jDtPNxqx3WtTXgHw1NZalXpcH/E4LqUa9+2xbv1g== - dependencies: - workbox-core "6.6.1" - -workbox-recipes@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-recipes/-/workbox-recipes-6.6.1.tgz#ea70d2b2b0b0bce8de0a9d94f274d4a688e69fae" - integrity sha512-/oy8vCSzromXokDA+X+VgpeZJvtuf8SkQ8KL0xmRivMgJZrjwM3c2tpKTJn6PZA6TsbxGs3Sc7KwMoZVamcV2g== - dependencies: - workbox-cacheable-response "6.6.1" - workbox-core "6.6.1" - workbox-expiration "6.6.1" - workbox-precaching "6.6.1" - workbox-routing "6.6.1" - workbox-strategies "6.6.1" - -workbox-routing@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-routing/-/workbox-routing-6.6.1.tgz#cba9a1c7e0d1ea11e24b6f8c518840efdc94f581" - integrity sha512-j4ohlQvfpVdoR8vDYxTY9rA9VvxTHogkIDwGdJ+rb2VRZQ5vt1CWwUUZBeD/WGFAni12jD1HlMXvJ8JS7aBWTg== - dependencies: - workbox-core "6.6.1" - -workbox-strategies@6.6.1: - version "6.6.1" - resolved 
"https://registry.yarnpkg.com/workbox-strategies/-/workbox-strategies-6.6.1.tgz#38d0f0fbdddba97bd92e0c6418d0b1a2ccd5b8bf" - integrity sha512-WQLXkRnsk4L81fVPkkgon1rZNxnpdO5LsO+ws7tYBC6QQQFJVI6v98klrJEjFtZwzw/mB/HT5yVp7CcX0O+mrw== - dependencies: - workbox-core "6.6.1" - -workbox-streams@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-streams/-/workbox-streams-6.6.1.tgz#b2f7ba7b315c27a6e3a96a476593f99c5d227d26" - integrity sha512-maKG65FUq9e4BLotSKWSTzeF0sgctQdYyTMq529piEN24Dlu9b6WhrAfRpHdCncRS89Zi2QVpW5V33NX8PgH3Q== - dependencies: - workbox-core "6.6.1" - workbox-routing "6.6.1" - -workbox-sw@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-sw/-/workbox-sw-6.6.1.tgz#d4c4ca3125088e8b9fd7a748ed537fa0247bd72c" - integrity sha512-R7whwjvU2abHH/lR6kQTTXLHDFU2izht9kJOvBRYK65FbwutT4VvnUAJIgHvfWZ/fokrOPhfoWYoPCMpSgUKHQ== - -workbox-webpack-plugin@^6.4.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-webpack-plugin/-/workbox-webpack-plugin-6.6.1.tgz#4f81cc1ad4e5d2cd7477a86ba83c84ee2d187531" - integrity sha512-zpZ+ExFj9NmiI66cFEApyjk7hGsfJ1YMOaLXGXBoZf0v7Iu6hL0ZBe+83mnDq3YYWAfA3fnyFejritjOHkFcrA== - dependencies: - fast-json-stable-stringify "^2.1.0" - pretty-bytes "^5.4.1" - upath "^1.2.0" - webpack-sources "^1.4.3" - workbox-build "6.6.1" - -workbox-window@6.6.1: - version "6.6.1" - resolved "https://registry.yarnpkg.com/workbox-window/-/workbox-window-6.6.1.tgz#f22a394cbac36240d0dadcbdebc35f711bb7b89e" - integrity sha512-wil4nwOY58nTdCvif/KEZjQ2NP8uk3gGeRNy2jPBbzypU4BT4D9L8xiwbmDBpZlSgJd2xsT9FvSNU0gsxV51JQ== - dependencies: - "@types/trusted-types" "^2.0.2" - workbox-core "6.6.1" - wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" @@ -16993,35 +11049,20 @@ wrappy@1: resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity 
sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= -write-file-atomic@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" - integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== - dependencies: - imurmurhash "^0.1.4" - is-typedarray "^1.0.0" - signal-exit "^3.0.2" - typedarray-to-buffer "^3.1.5" - ws@8.14.2, ws@^8.12.0: version "8.14.2" resolved "https://registry.yarnpkg.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f" integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g== -ws@^7.4.6: - version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== - ws@^8.13.0: version "8.13.0" resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== +xml-name-validator@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-4.0.0.tgz#79a006e2e63149a8600f15430f0a4725d1524835" + integrity sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw== xmlchars@^2.2.0: version "2.2.0" @@ -17073,16 +11114,11 @@ yaml-ast-parser@^0.0.43: resolved "https://registry.yarnpkg.com/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz#e8a23e6fb4c38076ab92995c5dca33f3d3d7c9bb" integrity 
sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A== -yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: +yaml@^1.10.0, yaml@^1.7.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yaml@^2.1.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.1.tgz#02fe0975d23cd441242aa7204e09fc28ac2ac33b" - integrity sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ== - yaml@^2.3.1: version "2.3.4" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" @@ -17164,16 +11200,16 @@ yjs@^13.5.23: dependencies: lib0 "^0.2.49" -yn@3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" - integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== - yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== +yocto-queue@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" + integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== + zen-observable@^0.8.14: version "0.8.15" resolved "https://registry.yarnpkg.com/zen-observable/-/zen-observable-0.8.15.tgz#96415c512d8e3ffd920afd3889604e30b9eaac15" diff --git a/docker/README.md b/docker/README.md index 21d38bbb7f2eeb..ad847dc70cf3c0 100644 --- a/docker/README.md +++ b/docker/README.md @@ -26,13 +26,13 @@ DataHub Docker Images: Do not use `latest` or `debug` tags for any 
of the image as those are not supported and present only due to legacy reasons. Please use `head` or tags specific for versions like `v0.8.40`. For production we recommend using version specific tags not `head`. * [acryldata/datahub-ingestion](https://hub.docker.com/r/acryldata/datahub-ingestion/) -* [linkedin/datahub-gms](https://hub.docker.com/repository/docker/linkedin/datahub-gms/) -* [linkedin/datahub-frontend-react](https://hub.docker.com/repository/docker/linkedin/datahub-frontend-react/) -* [linkedin/datahub-mae-consumer](https://hub.docker.com/repository/docker/linkedin/datahub-mae-consumer/) -* [linkedin/datahub-mce-consumer](https://hub.docker.com/repository/docker/linkedin/datahub-mce-consumer/) +* [acryldata/datahub-gms](https://hub.docker.com/repository/docker/acryldata/datahub-gms/) +* [acryldata/datahub-frontend-react](https://hub.docker.com/repository/docker/acryldata/datahub-frontend-react/) +* [acryldata/datahub-mae-consumer](https://hub.docker.com/repository/docker/acryldata/datahub-mae-consumer/) +* [acryldata/datahub-mce-consumer](https://hub.docker.com/repository/docker/acryldata/datahub-mce-consumer/) * [acryldata/datahub-upgrade](https://hub.docker.com/r/acryldata/datahub-upgrade/) -* [linkedin/datahub-kafka-setup](https://hub.docker.com/r/acryldata/datahub-kafka-setup/) -* [linkedin/datahub-elasticsearch-setup](https://hub.docker.com/r/linkedin/datahub-elasticsearch-setup/) +* [acryldata/datahub-kafka-setup](https://hub.docker.com/r/acryldata/datahub-kafka-setup/) +* [acryldata/datahub-elasticsearch-setup](https://hub.docker.com/r/acryldata/datahub-elasticsearch-setup/) * [acryldata/datahub-mysql-setup](https://hub.docker.com/r/acryldata/datahub-mysql-setup/) * [acryldata/datahub-postgres-setup](https://hub.docker.com/r/acryldata/datahub-postgres-setup/) * [acryldata/datahub-actions](https://hub.docker.com/r/acryldata/datahub-actions). Do not use `acryldata/acryl-datahub-actions` as that is deprecated and no longer used. 
@@ -64,7 +64,7 @@ successful release on Github will automatically publish the images. To build the full images (that we are going to publish), you need to run the following: ``` -COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub build +COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose -p datahub build ``` This is because we're relying on builtkit for multistage builds. It does not hurt also set `DATAHUB_VERSION` to diff --git a/docker/airflow/local_airflow.md b/docker/airflow/local_airflow.md index fbfc1d17327c53..e2bd62df84098b 100644 --- a/docker/airflow/local_airflow.md +++ b/docker/airflow/local_airflow.md @@ -54,7 +54,7 @@ curl -L 'https://raw.githubusercontent.com/datahub-project/datahub/master/metada First you need to initialize airflow in order to create initial database tables and the initial airflow user. ``` -docker-compose up airflow-init +docker compose up airflow-init ``` You should see the following final initialization message @@ -66,10 +66,10 @@ airflow_install_airflow-init_1 exited with code 0 ``` -Afterwards you need to start the airflow docker-compose +Afterwards you need to start the airflow docker compose ``` -docker-compose up +docker compose up ``` You should see a host of messages as Airflow starts up. 
diff --git a/docker/build.gradle b/docker/build.gradle index bc79be501b3952..b14739104a9f1e 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -1,161 +1,155 @@ plugins { id 'java' // required by versioning + id 'docker-compose' } +import com.avast.gradle.dockercompose.tasks.ComposeUp +import com.avast.gradle.dockercompose.tasks.ComposeDownForced apply from: "../gradle/versioning/versioning.gradle" ext { - quickstart_modules = [ + backend_profile_modules = [ ':docker:elasticsearch-setup', ':docker:mysql-setup', ':docker:kafka-setup', ':datahub-upgrade', + ':metadata-service:war', + ] + quickstart_modules = backend_profile_modules + [ ':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job', - ':metadata-service:war', - ':datahub-frontend', + ':datahub-frontend' ] debug_modules = quickstart_modules - [':metadata-jobs:mce-consumer-job', ':metadata-jobs:mae-consumer-job'] - debug_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.override.yml', - '-f', 'docker-compose-without-neo4j.m1.yml', // updates to mariadb - '-f', 'docker-compose.dev.yml' - ] + compose_args = ['-f', 'profiles/docker-compose.yml'] debug_reloadable = [ - 'datahub-gms', - 'datahub-frontend-react' + 'datahub-gms-debug', + 'system-update-debug', + 'frontend-debug' ] - // Postgres pg_quickstart_modules = quickstart_modules - [':docker:mysql-setup'] + [':docker:postgres-setup'] - pg_compose_args = [ - '-f', 'docker-compose-without-neo4j.yml', - '-f', 'docker-compose-without-neo4j.postgres.override.yml' - ] } -task quickstart(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - // environment "ACTIONS_VERSION", 'alpine3.18-slim' - // environment "DATAHUB_ACTIONS_IMAGE", 'nginx' - - // 
Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] - - commandLine 'bash', '-c', cmd.join(" ") +tasks.register('minDockerCompose2.20', Exec) { + executable 'bash' + args '-c', 'echo -e "$(docker compose version --short)\n2.20"|sort --version-sort --check=quiet --reverse' } -task quickstartSlim(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(([':docker:datahub-ingestion'] + quickstart_modules).collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - environment "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" - environment "ACTIONS_VERSION", "v${version}-slim" - environment "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' - environment "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] - - commandLine 'bash', '-c', cmd.join(" ") +tasks.register('quickstart') {} +tasks.register('quickstartSlim') {} +tasks.register('quickstartDebug') {} +tasks.register('quickstartPg') {} + +tasks.register('quickstartNuke') { + doFirst { + dockerCompose.quickstart.removeVolumes = 
true + dockerCompose.quickstartPg.removeVolumes = true + dockerCompose.quickstartSlim.removeVolumes = true + dockerCompose.quickstartDebug.removeVolumes = true + } + finalizedBy(tasks.withType(ComposeDownForced)) } -task quickstartNuke(type: Exec, dependsOn: ":metadata-ingestion:install") { - shouldRunAfter(':metadata-ingestion:clean') - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker nuke' - ] - commandLine 'bash', '-c', cmd.join(" ") +tasks.register('quickstartDown') { + finalizedBy(tasks.withType(ComposeDownForced)) } -task quickstartDebug(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(debug_modules.collect { it + ':dockerTagDebug' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - - // Elastic - // environment "DATAHUB_SEARCH_IMAGE", 'elasticsearch' - // environment "DATAHUB_SEARCH_TAG", '7.10.1' - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--version', "debug", - '--dump-logs-on-failure' - ] + debug_compose_args - commandLine 'bash', '-c', cmd.join(" ") +dockerCompose { + quickstart { + isRequiredBy(tasks.named('quickstart')) + composeAdditionalArgs = ['--profile', 'quickstart-consumers'] + + environment.put 'DATAHUB_VERSION', "v${version}" + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + 
captureContainersOutput = true + captureContainersOutputToFiles = project.file('build/container-logs') + } + + quickstartPg { + isRequiredBy(tasks.named('quickstartPg')) + composeAdditionalArgs = ['--profile', 'quickstart-postgres'] + + environment.put 'DATAHUB_VERSION', "v${version}" + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + } + + /** + * The smallest disk footprint required for Spark integration tests + * + * No frontend, mae, mce, or other services + */ + quickstartSlim { + isRequiredBy(tasks.named('quickstartSlim')) + composeAdditionalArgs = ['--profile', 'quickstart-backend'] + + environment.put 'DATAHUB_VERSION', "v${version}" + environment.put "DATAHUB_ACTIONS_IMAGE", "acryldata/datahub-ingestion" + environment.put "ACTIONS_VERSION", "v${version}-slim" + environment.put "ACTIONS_EXTRA_PACKAGES", 'acryl-datahub-actions[executor] acryl-datahub-actions' + environment.put "ACTIONS_CONFIG", 'https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml' + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + buildBeforePull = false + stopContainers = false + removeVolumes = false + captureContainersOutput = true + captureContainersOutputToFiles = project.file('build/container-logs') + } + + quickstartDebug { + isRequiredBy(tasks.named('quickstartDebug')) + composeAdditionalArgs = ['--profile', 'debug'] + + environment.put 'DATAHUB_TELEMETRY_ENABLED', 'false' // disabled when built locally + + useComposeFiles = ['profiles/docker-compose.yml'] + projectName = 'datahub' + projectNamePrefix = '' + buildBeforeUp = false + 
buildBeforePull = false + stopContainers = false + removeVolumes = false + } +} +tasks.getByName('quickstartComposeUp').dependsOn( + quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartPgComposeUp').dependsOn( + pg_quickstart_modules.collect { it + ':dockerTag' }) +tasks.getByName('quickstartSlimComposeUp').dependsOn( + ([':docker:datahub-ingestion'] + backend_profile_modules) + .collect { it + ':dockerTag' }) +tasks.getByName('quickstartDebugComposeUp').dependsOn( + debug_modules.collect { it + ':dockerTagDebug' } +) +tasks.withType(ComposeUp).configureEach { + shouldRunAfter('quickstartNuke') + dependsOn tasks.named("minDockerCompose2.20") } task debugReload(type: Exec) { - def cmd = ['docker compose -p datahub'] + debug_compose_args + ['restart'] + debug_reloadable + def cmd = ['docker compose -p datahub --profile debug'] + compose_args + ['restart'] + debug_reloadable commandLine 'bash', '-c', cmd.join(" ") } - -task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { - dependsOn(pg_quickstart_modules.collect { it + ':dockerTag' }) - shouldRunAfter ':metadata-ingestion:clean', 'quickstartNuke' - - environment "DATAHUB_TELEMETRY_ENABLED", "false" - environment "DOCKER_COMPOSE_BASE", "file://${rootProject.projectDir}" - environment "DATAHUB_POSTGRES_VERSION", "15.5" - - // OpenSearch - environment "DATAHUB_SEARCH_IMAGE", 'opensearchproject/opensearch' - environment "DATAHUB_SEARCH_TAG", '2.9.0' - environment "XPACK_SECURITY_ENABLED", 'plugins.security.disabled=true' - environment "USE_AWS_ELASTICSEARCH", 'true' - - def cmd = [ - 'source ../metadata-ingestion/venv/bin/activate && ', - 'datahub docker quickstart', - '--no-pull-images', - '--standalone_consumers', - '--version', "v${version}", - '--dump-logs-on-failure' - ] + pg_compose_args - - commandLine 'bash', '-c', cmd.join(" ") -} \ No newline at end of file diff --git a/docker/cassandra/docker-compose.cassandra.yml b/docker/cassandra/docker-compose.cassandra.yml 
index 2be91788bb0404..ae7d649ab3d239 100644 --- a/docker/cassandra/docker-compose.cassandra.yml +++ b/docker/cassandra/docker-compose.cassandra.yml @@ -3,7 +3,6 @@ version: '3.8' services: cassandra: - container_name: cassandra hostname: cassandra image: cassandra:3.11 ports: @@ -16,7 +15,6 @@ services: volumes: - cassandradata:/var/lib/cassandra cassandra-load-keyspace: - container_name: cassandra-setup image: cassandra:3.11 depends_on: cassandra: diff --git a/docker/datahub-frontend/Dockerfile b/docker/datahub-frontend/Dockerfile index 576d4297b6283a..5563fd6350e20a 100644 --- a/docker/datahub-frontend/Dockerfile +++ b/docker/datahub-frontend/Dockerfile @@ -5,6 +5,9 @@ FROM alpine:3.18 AS base # Configurable repositories ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine +ARG GITHUB_REPO_URL=https://github.com +ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 + RUN addgroup -S datahub && adduser -S datahub -G datahub # Optionally set corporate mirror for apk @@ -12,26 +15,26 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # Upgrade Alpine and base packages # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 +ENV JMX_VERSION=0.18.0 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ - && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ + && wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ + && wget 
${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar ENV LD_LIBRARY_PATH="/lib:/lib64" FROM base as prod-install COPY ./datahub-frontend.zip / -RUN unzip datahub-frontend.zip && rm datahub-frontend.zip +RUN unzip datahub-frontend.zip -d /datahub-frontend \ + && mv /datahub-frontend/main/* /datahub-frontend \ + && rmdir /datahub-frontend/main \ + && rm datahub-frontend.zip COPY ./docker/monitoring/client-prometheus-config.yaml /datahub-frontend/ RUN chown -R datahub:datahub /datahub-frontend && chmod 755 /datahub-frontend -ARG GITHUB_REPO_URL=https://github.com -ARG MAVEN_CENTRAL_REPO_URL=https://repo1.maven.org/maven2 -ENV JMX_VERSION=0.18.0 -RUN wget ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar -O opentelemetry-javaagent.jar \ - && wget ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar - FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
# See this excellent thread https://github.com/docker/cli/issues/1134 diff --git a/docker/datahub-frontend/start.sh b/docker/datahub-frontend/start.sh index 12e6b8915096d6..f5de9c87968b0d 100755 --- a/docker/datahub-frontend/start.sh +++ b/docker/datahub-frontend/start.sh @@ -49,6 +49,8 @@ export JAVA_OPTS="${JAVA_MEMORY_OPTS:-"-Xms512m -Xmx1024m"} \ -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf \ -Dlogback.configurationFile=datahub-frontend/conf/logback.xml \ -Dlogback.debug=false \ + --add-opens java.base/java.lang=ALL-UNNAMED \ + --add-opens=java.base/java.util=ALL-UNNAMED \ ${PROMETHEUS_AGENT:-} ${OTEL_AGENT:-} \ ${TRUSTSTORE_FILE:-} ${TRUSTSTORE_TYPE:-} ${TRUSTSTORE_PASSWORD:-} \ ${HTTP_PROXY:-} ${HTTPS_PROXY:-} ${NO_PROXY:-} \ diff --git a/docker/datahub-gms/Dockerfile b/docker/datahub-gms/Dockerfile index 9c79e1da542f0b..c62ab3c9e3bfaf 100644 --- a/docker/datahub-gms/Dockerfile +++ b/docker/datahub-gms/Dockerfile @@ -40,14 +40,14 @@ RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output 
jetty-util.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/11.0.19/jetty-runner-11.0.19.jar --output jetty-runner.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/11.0.19/jetty-jmx-11.0.19.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/11.0.19/jetty-util-11.0.19.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-ingestion-base/Dockerfile b/docker/datahub-ingestion-base/Dockerfile index e0f9fdc997071c..bfd4ee1143f5ef 100644 --- a/docker/datahub-ingestion-base/Dockerfile +++ b/docker/datahub-ingestion-base/Dockerfile @@ -4,8 +4,8 @@ ARG BASE_IMAGE=base # Defining custom repo urls for use in enterprise environments. Re-used between stages below. 
ARG ALPINE_REPO_URL=http://dl-cdn.alpinelinux.org/alpine ARG GITHUB_REPO_URL=https://github.com -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian -ARG PIP_MIRROR_URL=null +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian +ARG PIP_MIRROR_URL=https://pypi.python.org/simple FROM golang:1-alpine3.18 AS dockerize-binary @@ -26,21 +26,20 @@ RUN go install github.com/jwilder/dockerize@$DOCKERIZE_VERSION FROM python:3.10 as base -ARG DEBIAN_REPO_URL -ARG PIP_MIRROR_URL ARG GITHUB_REPO_URL -ENV LIBRDKAFKA_VERSION=1.6.2 -ENV CONFLUENT_KAFKA_VERSION=1.6.1 - ENV DEBIAN_FRONTEND noninteractive -# Optionally set corporate mirror for apk and pip +# Optionally set corporate mirror for deb +ARG DEBIAN_REPO_URL RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi -RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi + +# Optionally set corporate mirror for pip +ARG PIP_MIRROR_URL +RUN if [ "${PIP_MIRROR_URL}" != "https://pypi.python.org/simple" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi +ENV UV_INDEX_URL=${PIP_MIRROR_URL} RUN apt-get update && apt-get install -y -qq \ - make \ python3-ldap \ libldap2-dev \ libsasl2-dev \ @@ -52,12 +51,9 @@ RUN apt-get update && apt-get install -y -qq \ zip \ unzip \ ldap-utils \ - && python -m pip install --no-cache --upgrade pip wheel setuptools \ - && wget -q ${GITHUB_REPO_URL}/edenhill/librdkafka/archive/v${LIBRDKAFKA_VERSION}.tar.gz -O - | \ - tar -xz -C /root \ - && cd /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && ./configure --prefix /usr && make && make install && cd .. 
&& rm -rf /root/librdkafka-${LIBRDKAFKA_VERSION} \ - && apt-get remove -y make \ + unixodbc \ + libodbc2 \ + && python -m pip install --no-cache --upgrade pip uv>=0.1.10 wheel setuptools \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* # compiled against newer golang for security fixes @@ -66,16 +62,21 @@ COPY --from=dockerize-binary /go/bin/dockerize /usr/local/bin COPY ./docker/datahub-ingestion-base/base-requirements.txt requirements.txt COPY ./docker/datahub-ingestion-base/entrypoint.sh /entrypoint.sh -RUN pip install --no-cache -r requirements.txt && \ - pip uninstall -y acryl-datahub && \ - chmod +x /entrypoint.sh && \ - addgroup --gid 1000 datahub && \ - adduser --disabled-password --uid 1000 --gid 1000 --home /datahub-ingestion datahub +RUN addgroup --gid 1000 datahub && \ + adduser --disabled-password --uid 1000 --gid 1000 --home /datahub-ingestion datahub && \ + chmod +x /entrypoint.sh + +USER datahub +ENV VIRTUAL_ENV=/datahub-ingestion/.venv +ENV PATH="${VIRTUAL_ENV}/bin:$PATH" +RUN python3 -m venv $VIRTUAL_ENV && \ + uv pip install --no-cache -r requirements.txt ENTRYPOINT [ "/entrypoint.sh" ] FROM ${BASE_IMAGE} as full-install +USER 0 RUN apt-get update && apt-get install -y -qq \ default-jre-headless \ && rm -rf /var/lib/apt/lists/* /var/cache/apk/* @@ -98,10 +99,11 @@ RUN if [ $(arch) = "x86_64" ]; then \ ldconfig; \ fi; +USER datahub + FROM ${BASE_IMAGE} as slim-install # Do nothing else on top of base FROM ${APP_ENV}-install -USER datahub ENV PATH="/datahub-ingestion/.local/bin:$PATH" diff --git a/docker/datahub-ingestion-base/base-requirements.txt b/docker/datahub-ingestion-base/base-requirements.txt index eb082d50b3020f..f2d7675d32ae35 100644 --- a/docker/datahub-ingestion-base/base-requirements.txt +++ b/docker/datahub-ingestion-base/base-requirements.txt @@ -1,215 +1,200 @@ -# Excluded for slim -# pyspark==3.0.3 -# pydeequ==1.0.1 - -acryl-datahub-classify==0.0.8 -acryl-PyHive==0.6.14 -acryl-sqlglot==18.5.2.dev45 +# Generated requirements file. 
Run ./regenerate-base-requirements.sh to regenerate. +acryl-datahub-classify==0.0.9 +acryl-PyHive==0.6.16 +acryl-sqlglot==22.4.1.dev4 aenum==3.1.15 -aiohttp==3.8.6 +aiohttp==3.9.3 aiosignal==1.3.1 -alembic==1.12.0 +alembic==1.13.1 altair==4.2.0 -anyio==3.7.1 -apache-airflow==2.7.2 -apache-airflow-providers-common-sql==1.7.2 -apache-airflow-providers-ftp==3.5.2 -apache-airflow-providers-http==4.5.2 -apache-airflow-providers-imap==3.3.2 -apache-airflow-providers-sqlite==3.4.3 -apispec==6.3.0 -appdirs==1.4.4 -appnope==0.1.3 -argcomplete==3.1.2 +anyio==4.3.0 +apache-airflow==2.8.4 +apache-airflow-providers-common-io==1.3.0 +apache-airflow-providers-common-sql==1.11.1 +apache-airflow-providers-ftp==3.7.0 +apache-airflow-providers-http==4.10.0 +apache-airflow-providers-imap==3.5.0 +apache-airflow-providers-smtp==1.6.1 +apache-airflow-providers-sqlite==3.7.1 +apispec==6.6.0 +appnope==0.1.4 +argcomplete==3.2.3 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 -asgiref==3.7.2 +asgiref==3.8.1 asn1crypto==1.5.1 -asttokens==2.4.0 +asttokens==2.4.1 async-timeout==4.0.3 -asynch==0.2.2 -attrs==23.1.0 -avro==1.10.2 -avro-gen3==0.7.11 -Babel==2.13.0 -backcall==0.2.0 +asynch==0.2.3 +attrs==23.2.0 +avro==1.11.3 +avro-gen3==0.7.12 +Babel==2.14.0 backoff==2.2.1 -beautifulsoup4==4.12.2 +beautifulsoup4==4.12.3 bleach==6.1.0 -blinker==1.6.3 +blinker==1.7.0 blis==0.7.11 -boto3==1.28.62 -botocore==1.31.62 -bowler==0.9.0 +boto3==1.34.71 +botocore==1.34.71 bracex==2.4 cached-property==1.5.2 cachelib==0.9.0 -cachetools==5.3.1 +cachetools==5.3.3 catalogue==2.0.10 -cattrs==23.1.2 -certifi==2023.7.22 +cattrs==23.2.3 +certifi==2024.2.2 cffi==1.16.0 chardet==5.2.0 -charset-normalizer==3.3.0 -ciso8601==2.3.0 +charset-normalizer==3.3.2 +ciso8601==2.3.1 click==8.1.7 click-default-group==1.2.4 click-spinner==0.1.10 clickclick==20.10.2 -clickhouse-cityhash==1.0.2.4 -clickhouse-driver==0.2.6 +clickhouse-driver==0.2.7 clickhouse-sqlalchemy==0.2.4 -cloudpickle==2.2.1 +cloudpickle==3.0.0 colorama==0.4.6 
colorlog==4.8.0 -comm==0.1.4 -confection==0.1.3 -ConfigUpdater==3.1.1 -confluent-kafka==1.8.2 -connexion==2.14.2 -cron-descriptor==1.4.0 -croniter==2.0.1 -cryptography==41.0.4 -cx-Oracle==8.3.0 +comm==0.2.2 +confection==0.1.4 +ConfigUpdater==3.2 +confluent-kafka==2.3.0 +connexion==2.14.1 +cron-descriptor==1.4.3 +croniter==2.0.3 +cryptography==42.0.5 +cx_Oracle==8.3.0 cymem==2.0.8 -dask==2023.9.3 -databricks-cli==0.18.0 databricks-dbapi==0.6.0 -databricks-sdk==0.10.0 -debugpy==1.8.0 +databricks-sdk==0.23.0 +databricks-sql-connector==2.9.5 +dataflows-tabulator==1.54.3 +db-dtypes==1.2.0 +debugpy==1.8.1 decorator==5.1.1 defusedxml==0.7.1 -deltalake==0.11.0 +deltalake==0.16.3 Deprecated==1.2.14 -dill==0.3.7 -dnspython==2.4.2 -docker==6.1.3 +dill==0.3.8 +dnspython==2.6.1 +docker==7.0.0 docutils==0.20.1 ecdsa==0.18.0 elasticsearch==7.13.4 email-validator==1.3.1 entrypoints==0.4 et-xmlfile==1.1.0 -exceptiongroup==1.1.3 -executing==2.0.0 -expandvars==0.11.0 -fastapi==0.103.2 -fastavro==1.8.4 -fastjsonschema==2.18.1 -feast==0.31.1 -filelock==3.12.4 -fissix==21.11.13 +exceptiongroup==1.2.0 +executing==2.0.1 +expandvars==0.12.0 +fastavro==1.9.4 +fastjsonschema==2.19.1 +filelock==3.13.3 Flask==2.2.5 flatdict==4.0.1 -frozenlist==1.4.0 -fsspec==2023.9.2 -future==0.18.3 -GeoAlchemy2==0.14.1 -gitdb==4.0.10 -GitPython==3.1.37 -google-api-core==2.12.0 -google-auth==2.23.3 -google-cloud-appengine-logging==1.3.2 +frozenlist==1.4.1 +fsspec==2023.12.2 +future==1.0.0 +GeoAlchemy2==0.14.6 +gitdb==4.0.11 +GitPython==3.1.42 +google-api-core==2.18.0 +google-auth==2.29.0 +google-cloud-appengine-logging==1.4.3 google-cloud-audit-log==0.2.5 -google-cloud-bigquery==3.12.0 -google-cloud-core==2.3.3 +google-cloud-bigquery==3.19.0 +google-cloud-core==2.4.1 google-cloud-datacatalog-lineage==0.2.2 google-cloud-logging==3.5.0 google-crc32c==1.5.0 google-re2==1.1 -google-resumable-media==2.6.0 -googleapis-common-protos==1.60.0 -gql==3.4.1 +google-resumable-media==2.7.0 +googleapis-common-protos==1.63.0 
+gql==3.5.0 graphql-core==3.2.3 -graphviz==0.20.1 great-expectations==0.15.50 -greenlet==3.0.0 -grpc-google-iam-v1==0.12.6 -grpcio==1.59.0 -grpcio-reflection==1.59.0 -grpcio-status==1.59.0 -grpcio-tools==1.59.0 +greenlet==3.0.3 +grpc-google-iam-v1==0.13.0 +grpcio==1.62.1 +grpcio-status==1.62.1 +grpcio-tools==1.62.1 gssapi==1.8.3 gunicorn==21.2.0 h11==0.14.0 -httpcore==0.18.0 -httptools==0.6.0 -httpx==0.25.0 +httpcore==1.0.4 +httpx==0.27.0 humanfriendly==10.0 -idna==3.4 +idna==3.6 ijson==3.2.3 -importlib-metadata==6.8.0 -importlib-resources==6.1.0 +importlib_metadata==7.1.0 +importlib_resources==6.4.0 inflection==0.5.1 ipaddress==1.0.23 ipykernel==6.17.1 -ipython==8.16.1 +ipython==8.21.0 ipython-genutils==0.2.0 -ipywidgets==8.1.1 +ipywidgets==8.1.2 iso3166==2.1.1 isodate==0.6.1 itsdangerous==2.1.2 jedi==0.19.1 -Jinja2==3.1.2 +Jinja2==3.1.3 jmespath==1.0.1 -JPype1==1.4.1 +JPype1==1.5.0 jsonlines==4.0.0 jsonpatch==1.33 jsonpointer==2.4 jsonref==1.1.0 -jsonschema==4.19.1 -jsonschema-specifications==2023.7.1 -jupyter-server==1.24.0 +jsonschema==4.21.1 +jsonschema-specifications==2023.12.1 +jupyter-server==1.16.0 jupyter_client==7.4.9 -jupyter_core==4.12.0 -jupyterlab-pygments==0.2.2 -jupyterlab-widgets==3.0.9 +jupyter_core==5.0.0 +jupyterlab_pygments==0.3.0 +jupyterlab_widgets==3.0.10 langcodes==3.3.0 lark==1.1.4 -lazy-object-proxy==1.9.0 -leb128==1.0.5 -limits==3.6.0 +lazy-object-proxy==1.10.0 +leb128==1.0.7 +limits==3.10.1 linear-tsv==1.1.0 -linkify-it-py==2.0.2 -lkml==1.3.1 -locket==1.0.0 +linkify-it-py==2.0.3 +lkml==1.3.4 lockfile==0.12.2 looker-sdk==23.0.0 -lxml==4.9.3 -lz4==4.3.2 -makefun==1.15.1 -Mako==1.2.4 -Markdown==3.5 +lxml==5.1.0 +lz4==4.3.3 +makefun==1.15.2 +Mako==1.3.2 markdown-it-py==3.0.0 -MarkupSafe==2.1.3 -marshmallow==3.20.1 -marshmallow-oneofschema==3.0.1 +MarkupSafe==2.1.5 +marshmallow==3.21.1 +marshmallow-oneofschema==3.1.1 marshmallow-sqlalchemy==0.26.1 matplotlib-inline==0.1.6 mdit-py-plugins==0.4.0 mdurl==0.1.2 mistune==3.0.2 -mixpanel==4.10.0 
-mlflow-skinny==2.7.1 -mmh3==4.0.1 +mixpanel==4.10.1 +mlflow-skinny==2.11.3 mmhash3==3.0.1 -more-itertools==10.1.0 -moreorless==0.4.0 -moto==4.2.5 +more-itertools==10.2.0 +moto==4.2.14 msal==1.22.0 -multidict==6.0.4 +multidict==6.0.5 murmurhash==1.0.10 -mypy==1.6.0 mypy-extensions==1.0.0 nbclassic==1.0.0 nbclient==0.6.3 -nbconvert==7.9.2 -nbformat==5.9.1 -nest-asyncio==1.5.8 -networkx==3.1 +nbconvert==7.16.3 +nbformat==5.10.3 +nest-asyncio==1.6.0 +networkx==3.2.1 notebook==6.5.6 -notebook_shim==0.2.3 -numpy==1.26.0 +notebook_shim==0.2.4 +numpy==1.26.4 oauthlib==3.2.2 okta==1.7.0 openlineage-airflow==1.2.0 @@ -217,178 +202,165 @@ openlineage-integration-common==1.2.0 openlineage-python==1.2.0 openlineage_sql==1.2.0 openpyxl==3.1.2 -opentelemetry-api==1.20.0 -opentelemetry-exporter-otlp==1.20.0 -opentelemetry-exporter-otlp-proto-common==1.20.0 -opentelemetry-exporter-otlp-proto-grpc==1.20.0 -opentelemetry-exporter-otlp-proto-http==1.20.0 -opentelemetry-proto==1.20.0 -opentelemetry-sdk==1.20.0 -opentelemetry-semantic-conventions==0.41b0 +opentelemetry-api==1.16.0 +opentelemetry-exporter-otlp==1.16.0 +opentelemetry-exporter-otlp-proto-grpc==1.16.0 +opentelemetry-exporter-otlp-proto-http==1.16.0 +opentelemetry-proto==1.16.0 +opentelemetry-sdk==1.16.0 +opentelemetry-semantic-conventions==0.37b0 ordered-set==4.1.0 -oscrypto==1.3.0 packaging==23.2 -pandas==1.5.3 -pandavro==1.5.2 -pandocfilters==1.5.0 -parse==1.19.1 +pandas==2.2.1 +pandocfilters==1.5.1 +parse==1.20.1 parso==0.8.3 -partd==1.4.1 -pathspec==0.11.2 -pathy==0.10.2 -pendulum==2.1.2 -pexpect==4.8.0 +pathlib_abc==0.1.1 +pathspec==0.12.1 +pathy==0.11.0 +pendulum==3.0.0 +pexpect==4.9.0 phonenumbers==8.13.0 -pickleshare==0.7.5 platformdirs==3.11.0 -pluggy==1.3.0 +pluggy==1.4.0 preshed==3.0.9 prison==0.2.1 -progressbar2==4.2.0 -prometheus-client==0.17.1 -prompt-toolkit==3.0.39 -proto-plus==1.22.3 -protobuf==4.24.4 -psutil==5.9.5 +progressbar2==4.4.2 +prometheus_client==0.20.0 +prompt-toolkit==3.0.43 +proto-plus==1.23.0 
+protobuf==4.25.3 +psutil==5.9.8 psycopg2-binary==2.9.9 ptyprocess==0.7.0 pure-eval==0.2.2 pure-sasl==0.6.2 -py-partiql-parser==0.3.7 -pyarrow==11.0.0 -pyasn1==0.5.0 -pyasn1-modules==0.3.0 -pyathena==2.4.1 -pycountry==22.3.5 +py-partiql-parser==0.5.0 +pyarrow==15.0.2 +pyarrow-hotfix==0.6 +pyasn1==0.6.0 +pyasn1_modules==0.4.0 +pyathena==2.25.2 +pycountry==23.12.11 pycparser==2.21 -pycryptodome==3.19.0 -pycryptodomex==3.19.0 -pydantic==1.10.13 -pydash==7.0.6 -pydruid==0.6.5 -Pygments==2.16.1 +pycryptodome==3.20.0 +pydantic==1.10.14 +pydash==7.0.7 +pydruid==0.6.6 +Pygments==2.17.2 pyiceberg==0.4.0 -pymongo==4.5.0 +pymongo==4.6.2 PyMySQL==1.1.0 -pyOpenSSL==23.2.0 +pyOpenSSL==24.1.0 pyparsing==3.0.9 pyspnego==0.10.2 python-daemon==3.0.1 -python-dateutil==2.8.2 -python-dotenv==1.0.0 +python-dateutil==2.9.0.post0 python-jose==3.3.0 -python-ldap==3.4.3 +python-ldap==3.4.4 python-nvd3==0.15.0 -python-slugify==8.0.1 -python-stdnum==1.19 -python-tds==1.13.0 -python-utils==3.8.1 -python3-openid==3.2.0 -pytz==2023.3.post1 -pytzdata==2020.1 +python-slugify==8.0.4 +python-stdnum==1.20 +python-tds==1.15.0 +python-utils==3.8.2 +pytz==2024.1 PyYAML==6.0.1 pyzmq==24.0.1 -ratelimiter==1.2.0.post0 redash-toolbelt==0.1.9 -redshift-connector==2.0.914 -referencing==0.30.2 -regex==2023.10.3 +redshift-connector==2.1.0 +referencing==0.34.0 +regex==2023.12.25 requests==2.31.0 -requests-file==1.5.1 -requests-gssapi==1.2.3 +requests-file==2.0.0 +requests-gssapi==1.3.0 requests-ntlm==1.2.0 -requests-toolbelt==0.10.1 -responses==0.23.3 +requests-toolbelt==1.0.0 +responses==0.25.0 rfc3339-validator==0.1.4 rfc3986==2.0.0 -rich==13.6.0 -rich-argparse==1.3.0 -rpds-py==0.10.6 +rich==13.7.1 +rich-argparse==1.4.0 +rpds-py==0.18.0 rsa==4.9 ruamel.yaml==0.17.17 -ruamel.yaml.clib==0.2.8 -s3transfer==0.7.0 -schwifty==2023.9.0 -scipy==1.11.3 +s3transfer==0.10.1 +schwifty==2024.1.1.post0 +scipy==1.12.0 scramp==1.4.4 Send2Trash==1.8.2 -sentry-sdk==1.32.0 +sentry-sdk==1.43.0 setproctitle==1.3.3 
simple-salesforce==1.12.5 six==1.16.0 +slack-sdk==3.18.1 smart-open==6.4.0 smmap==5.0.1 -sniffio==1.3.0 -snowflake-connector-python==3.2.1 -snowflake-sqlalchemy==1.5.0 +sniffio==1.3.1 +snowflake-connector-python==3.7.1 +snowflake-sqlalchemy==1.5.1 sortedcontainers==2.4.0 soupsieve==2.5 -spacy==3.4.3 +spacy==3.5.0 spacy-legacy==3.0.12 spacy-loggers==1.0.5 sql-metadata==2.2.2 SQLAlchemy==1.4.44 -sqlalchemy-bigquery==1.8.0 -SQLAlchemy-JSONField==1.0.1.post0 +sqlalchemy-bigquery==1.10.0 +SQLAlchemy-JSONField==1.0.2 sqlalchemy-pytds==0.3.5 sqlalchemy-redshift==0.8.14 -SQLAlchemy-Utils==0.41.1 -sqlalchemy2-stubs==0.0.2a35 +SQLAlchemy-Utils==0.41.2 sqllineage==1.3.8 sqlparse==0.4.4 srsly==2.4.8 stack-data==0.6.3 -starlette==0.27.0 strictyaml==1.7.3 tableauserverclient==0.25 -tableschema==1.20.2 +tableschema==1.20.10 tabulate==0.9.0 -tabulator==1.53.5 tenacity==8.2.3 -termcolor==2.3.0 -terminado==0.17.1 +teradatasql==20.0.0.8 +teradatasqlalchemy==17.20.0.0 +termcolor==2.4.0 +terminado==0.18.1 text-unidecode==1.3 thinc==8.1.12 -thrift==0.13.0 +thrift==0.16.0 thrift-sasl==0.4.3 +time-machine==2.14.1 tinycss2==1.2.1 toml==0.10.2 -tomli==2.0.1 -tomlkit==0.12.1 -toolz==0.12.0 -tornado==6.3.3 -tqdm==4.66.1 +tomlkit==0.12.4 +toolz==0.12.1 +tornado==6.4 +tqdm==4.66.2 traitlets==5.2.1.post0 -trino==0.327.0 -typeguard==2.13.3 +trino==0.328.0 typer==0.7.0 -types-PyYAML==6.0.12.12 typing-inspect==0.9.0 -typing_extensions==4.8.0 -tzlocal==5.1 -uc-micro-py==1.0.2 -ujson==5.8.0 +typing_extensions==4.10.0 +tzdata==2024.1 +tzlocal==5.2 +uc-micro-py==1.0.3 +ujson==5.9.0 unicodecsv==0.14.1 -urllib3==1.26.17 -uvicorn==0.23.2 -uvloop==0.17.0 -vertica-python==1.3.5 -vertica-sqlalchemy-dialect==0.0.8 -vininfo==1.7.0 -volatile==2.1.0 -wasabi==0.10.1 -watchfiles==0.20.0 -wcmatch==8.5 -wcwidth==0.2.8 +universal-pathlib==0.1.4 +urllib3==1.26.18 +vertica-python==1.3.8 +vertica-sqlalchemy-dialect==0.0.8.1 +vininfo==1.8.0 +wasabi==1.1.2 +wcmatch==8.5.1 +wcwidth==0.2.13 webencodings==0.5.1 
-websocket-client==1.6.4 -websockets==11.0.3 -Werkzeug==2.2.3 -widgetsnbextension==4.0.9 -wrapt==1.15.0 -WTForms==3.1.0 +websocket-client==1.7.0 +Werkzeug==2.3.8 +widgetsnbextension==4.0.10 +wrapt==1.16.0 +WTForms==3.1.2 xlrd==2.0.1 xmltodict==0.13.0 -yarl==1.9.2 +yarl==1.9.4 zeep==4.2.1 -zstd==1.5.5.1 \ No newline at end of file +zipp==3.18.1 +zstd==1.5.5.1 diff --git a/docker/datahub-ingestion-base/build.gradle b/docker/datahub-ingestion-base/build.gradle index e0168290c48f86..faa0589cfbfbbf 100644 --- a/docker/datahub-ingestion-base/build.gradle +++ b/docker/datahub-ingestion-base/build.gradle @@ -12,7 +12,7 @@ ext { docker_target = project.getProperties().getOrDefault("dockerTarget", "slim") docker_version = "${version}${docker_target == 'slim' ? '-slim' : ''}" - revision = 2 // increment to trigger rebuild + revision = 3 // increment to trigger rebuild } docker { diff --git a/docker/datahub-ingestion-base/entrypoint.sh b/docker/datahub-ingestion-base/entrypoint.sh index 518bb215614678..73cc9358d03c9b 100644 --- a/docker/datahub-ingestion-base/entrypoint.sh +++ b/docker/datahub-ingestion-base/entrypoint.sh @@ -1,10 +1,10 @@ #!/usr/bin/bash -if [ ! -z "$ACTIONS_EXTRA_PACKAGES" ]; then - pip install --user $ACTIONS_EXTRA_PACKAGES +if [ -n "$ACTIONS_EXTRA_PACKAGES" ]; then + uv pip install $ACTIONS_EXTRA_PACKAGES fi -if [[ ! -z "$ACTIONS_CONFIG" && ! 
-z "$ACTIONS_EXTRA_PACKAGES" ]]; then +if [[ -n "$ACTIONS_CONFIG" && -n "$ACTIONS_EXTRA_PACKAGES" ]]; then mkdir -p /tmp/datahub/logs curl -q "$ACTIONS_CONFIG" -o config.yaml exec dockerize -wait ${DATAHUB_GMS_PROTOCOL:-http}://$DATAHUB_GMS_HOST:$DATAHUB_GMS_PORT/health -timeout 240s \ diff --git a/docker/datahub-ingestion-base/regenerate-base-requirements.sh b/docker/datahub-ingestion-base/regenerate-base-requirements.sh new file mode 100755 index 00000000000000..13d74922d9013b --- /dev/null +++ b/docker/datahub-ingestion-base/regenerate-base-requirements.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# This script is used to regenerate the base-requirements.txt file + +set -euxo pipefail +cd "$( dirname "${BASH_SOURCE[0]}" )" + +SCRIPT_NAME=$(basename "$0") +DATAHUB_DIR=$(pwd)/../.. + +# Create a virtualenv. +VENV_DIR=$(mktemp -d) +python -c "import sys; assert sys.version_info >= (3, 9), 'Python 3.9 or higher is required.'" +python -m venv $VENV_DIR +source $VENV_DIR/bin/activate +pip install --upgrade pip uv setuptools wheel +echo "Using virtualenv at $VENV_DIR" + +# Install stuff. +pushd $DATAHUB_DIR/metadata-ingestion +uv pip install -e '.[all]' -e '../metadata-ingestion-modules/airflow-plugin/[plugin-v2]' +popd + +# Generate the requirements file. +# Removing Flask deps due as per https://github.com/datahub-project/datahub/pull/6867/files +# Removing py4j and PyJWT due to https://github.com/datahub-project/datahub/pull/6868/files +# Removing pyspark and pydeequ because we don't want them in the slim image, so they can be added separately. +# TODO: It's unclear if these removals are still actually needed. +echo "# Generated requirements file. Run ./$SCRIPT_NAME to regenerate." 
> base-requirements.txt +pip freeze \ + | grep -v -E "^-e" \ + | grep -v -E "^uv==" \ + | grep -v "Flask-" \ + | grep -v -E "(py4j|PyJWT)==" \ + | grep -v -E "(pyspark|pydeequ)==" \ + >> base-requirements.txt diff --git a/docker/datahub-ingestion-base/smoke.Dockerfile b/docker/datahub-ingestion-base/smoke.Dockerfile index 15dc46ae5b882a..34654faaad729d 100644 --- a/docker/datahub-ingestion-base/smoke.Dockerfile +++ b/docker/datahub-ingestion-base/smoke.Dockerfile @@ -1,6 +1,6 @@ FROM acryldata/datahub-ingestion-base as base -RUN apt-get update && apt-get install -y \ +RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ sudo \ python3-dev \ libgtk2.0-0 \ @@ -13,14 +13,16 @@ RUN apt-get update && apt-get install -y \ libasound2 \ libxtst6 \ xauth \ - xvfb - -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y openjdk-11-jdk + xvfb \ + openjdk-17-jdk && \ + rm -rf /var/lib/apt/lists/* /var/cache/apk/* COPY . /datahub-src ARG RELEASE_VERSION -RUN cd /datahub-src/metadata-ingestion && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ - cat src/datahub/__init__.py && \ - cd ../ && \ - ./gradlew :metadata-ingestion:installAll +RUN cd /datahub-src && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" metadata-ingestion/src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ + cat metadata-ingestion/src/datahub/__init__.py && \ + ./gradlew :metadata-ingestion:codegen && \ + pip install file:metadata-ingestion-modules/airflow-plugin#egg=acryl-datahub-airflow-plugin file:metadata-ingestion#egg=acryl-datahub + diff --git a/docker/datahub-ingestion/Dockerfile b/docker/datahub-ingestion/Dockerfile index 9516c31a19e21b..3f29417dca0d78 100644 --- 
a/docker/datahub-ingestion/Dockerfile +++ b/docker/datahub-ingestion/Dockerfile @@ -2,51 +2,51 @@ ARG APP_ENV=full ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head -ARG PIP_MIRROR_URL=null -ARG DEBIAN_REPO_URL=http://deb.debian.org/debian +ARG DEBIAN_REPO_URL=https://deb.debian.org/debian +ARG PIP_MIRROR_URL=https://pypi.python.org/simple FROM $BASE_IMAGE:$DOCKER_VERSION as base + +# Optionally set corporate mirror for deb USER 0 +ARG DEBIAN_REPO_URL +RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi +USER datahub + +# Optionally set corporate mirror for pip +ARG PIP_MIRROR_URL +RUN if [ "${PIP_MIRROR_URL}" != "https://pypi.python.org/simple" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi +ENV UV_INDEX_URL=${PIP_MIRROR_URL} -COPY ./metadata-ingestion /datahub-ingestion -COPY ./metadata-ingestion-modules/airflow-plugin /datahub-ingestion/airflow-plugin +COPY --chown=datahub ./metadata-ingestion /datahub-ingestion +COPY --chown=datahub ./metadata-ingestion-modules/airflow-plugin /datahub-ingestion/airflow-plugin ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ - sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ - cat src/datahub/__init__.py && \ - chown -R datahub /datahub-ingestion - -USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ + sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" airflow-plugin/src/datahub_airflow_plugin/__init__.py && \ + cat src/datahub/__init__.py | grep __version__ && \ + cat 
airflow-plugin/src/datahub_airflow_plugin/__init__.py | grep __version__ FROM base as slim-install -ARG PIP_MIRROR_URL -RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi -RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" +RUN uv pip install --no-cache "acryl-datahub[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary] @ ." FROM base as full-install-build -ARG PIP_MIRROR_URL -ARG DEBIAN_REPO_URL USER 0 -RUN if [ "${DEBIAN_REPO_URL}" != "http://deb.debian.org/debian" ] ; then sed -i "s#http.*://deb.debian.org/debian#${DEBIAN_REPO_URL}#g" /etc/apt/sources.list.d/debian.sources ; fi RUN apt-get update && apt-get install -y -qq maven USER datahub COPY ./docker/datahub-ingestion/pyspark_jars.sh . -RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi -RUN pip install --no-cache --user ".[base]" && \ - pip install --no-cache --user "./airflow-plugin[acryl-datahub-airflow-plugin]" && \ - pip install --no-cache --user ".[all]" +RUN uv pip install --no-cache "acryl-datahub[base,all] @ ." "acryl-datahub-airflow-plugin[plugin-v2] @ ./airflow-plugin" && \ + datahub --version RUN ./pyspark_jars.sh FROM base as full-install -COPY --from=full-install-build /datahub-ingestion/.local /datahub-ingestion/.local +COPY --from=full-install-build ${VIRTUAL_ENV} ${VIRTUAL_ENV} FROM base as dev-install # Dummy stage for development. Assumes code is built on your machine and mounted to this image. 
@@ -55,4 +55,3 @@ FROM base as dev-install FROM ${APP_ENV}-install as final USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" diff --git a/docker/datahub-ingestion/Dockerfile-slim-only b/docker/datahub-ingestion/Dockerfile-slim-only index 4112f470c25bee..a5f2a93e8a27bd 100644 --- a/docker/datahub-ingestion/Dockerfile-slim-only +++ b/docker/datahub-ingestion/Dockerfile-slim-only @@ -1,30 +1,28 @@ # Defining environment ARG BASE_IMAGE=acryldata/datahub-ingestion-base ARG DOCKER_VERSION=head-slim -ARG PIP_MIRROR_URL=null +ARG PIP_MIRROR_URL=https://pypi.python.org/simple FROM $BASE_IMAGE:$DOCKER_VERSION as base -USER 0 +USER datahub + +# Optionally set corporate mirror for apk and pip +ARG PIP_MIRROR_URL +RUN if [ "${PIP_MIRROR_URL}" != "https://pypi.python.org/simple" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi +ENV UV_INDEX_URL=${PIP_MIRROR_URL} -COPY ./metadata-ingestion /datahub-ingestion +COPY --chown=datahub ./metadata-ingestion /datahub-ingestion ARG RELEASE_VERSION WORKDIR /datahub-ingestion -RUN sed -i.bak "s/__version__ = \"1!0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/datahub/__init__.py && \ - cat src/datahub/__init__.py && \ - chown -R datahub /datahub-ingestion - -USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" +RUN sed -i.bak "s/__version__ = \"1\!0.0.0.dev0\"/__version__ = \"$(echo $RELEASE_VERSION|sed s/-/+/)\"/" src/datahub/__init__.py && \ + cat src/datahub/__init__.py FROM base as slim-install -ARG PIP_MIRROR_URL - -RUN if [ "${PIP_MIRROR_URL}" != "null" ] ; then pip config set global.index-url ${PIP_MIRROR_URL} ; fi -RUN pip install --no-cache --user ".[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary]" +RUN uv pip install --no-cache 
"acryl-datahub[base,datahub-rest,datahub-kafka,snowflake,bigquery,redshift,mysql,postgres,hive,clickhouse,glue,dbt,looker,lookml,tableau,powerbi,superset,datahub-business-glossary] @ ." && \ + datahub --version FROM slim-install as final USER datahub -ENV PATH="/datahub-ingestion/.local/bin:$PATH" diff --git a/docker/datahub-ingestion/build.gradle b/docker/datahub-ingestion/build.gradle index 52db594e2ef852..b9ab546674a031 100644 --- a/docker/datahub-ingestion/build.gradle +++ b/docker/datahub-ingestion/build.gradle @@ -12,7 +12,7 @@ ext { docker_target = project.getProperties().getOrDefault("dockerTarget", "slim") docker_version = "${version}${docker_target == 'slim' ? '-slim' : ''}" - revision = 2 // increment to trigger rebuild + revision = 3 // increment to trigger rebuild } dependencies { @@ -33,7 +33,7 @@ docker { i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } - def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", '')] + def dockerBuildArgs = [DOCKER_VERSION: version, RELEASE_VERSION: version.replace('-SNAPSHOT', '').replace('v', '').replace("-slim", ''), BASE_IMAGE: "${docker_registry}/datahub-ingestion-base"] // Add build args if they are defined (needed for some CI or enterprise environments) if (project.hasProperty('pipMirrorUrl')) { @@ -45,9 +45,9 @@ docker { buildArgs(dockerBuildArgs) } -tasks.getByName('docker').dependsOn(['build', - ':docker:datahub-ingestion-base:docker', - ':metadata-ingestion:codegen']) +tasks.getByName('dockerPrepare').dependsOn(['build', + ':docker:datahub-ingestion-base:docker', + ':metadata-ingestion:codegen']) task mkdirBuildDocker { doFirst { diff --git a/docker/datahub-ingestion/pyspark_jars.sh b/docker/datahub-ingestion/pyspark_jars.sh index ab4b223f0358a5..833c3079b82df2 100755 --- a/docker/datahub-ingestion/pyspark_jars.sh +++ b/docker/datahub-ingestion/pyspark_jars.sh @@ -2,7 +2,7 @@ set -ex -PYSPARK_JARS="$(python -m 
site --user-site)/pyspark/jars" +PYSPARK_JARS="$(python -c 'import site; print(site.getsitepackages()[0])')/pyspark/jars" function replace_jar { JAR_PREFIX=$1 diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index 5bfa5f35ace179..9b7c6e762462e3 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-mae-consumer/start.sh b/docker/datahub-mae-consumer/start.sh index 2af7ce6855d1c5..f839d3646bdc6e 100755 --- a/docker/datahub-mae-consumer/start.sh +++ b/docker/datahub-mae-consumer/start.sh @@ -33,6 +33,9 @@ fi if [[ ${GRAPH_SERVICE_IMPL:-} != elasticsearch ]] && [[ ${SKIP_NEO4J_CHECK:-false} != true ]]; then dockerize_args+=("-wait" "$NEO4J_HOST") fi +if [[ "${KAFKA_SCHEMAREGISTRY_URL:-}" && 
${SKIP_SCHEMA_REGISTRY_CHECK:-false} != true ]]; then + dockerize_args+=("-wait" "$KAFKA_SCHEMAREGISTRY_URL") +fi JAVA_TOOL_OPTIONS="${JDK_JAVA_OPTIONS:-}${JAVA_OPTS:+ $JAVA_OPTS}${JMX_OPTS:+ $JMX_OPTS}" if [[ ${ENABLE_OTEL:-false} == true ]]; then diff --git a/docker/datahub-mce-consumer/Dockerfile b/docker/datahub-mce-consumer/Dockerfile index cc79a3072c1930..4da94794e0ead3 100644 --- a/docker/datahub-mce-consumer/Dockerfile +++ b/docker/datahub-mce-consumer/Dockerfile @@ -38,11 +38,11 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ && apk --no-cache add jattach --repository ${ALPINE_REPO_URL}/edge/community/ \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin FROM base as prod-install diff --git a/docker/datahub-mce-consumer/start.sh b/docker/datahub-mce-consumer/start.sh index ef183d41856aaa..a00127a841188c 100755 --- a/docker/datahub-mce-consumer/start.sh +++ b/docker/datahub-mce-consumer/start.sh @@ -5,6 +5,11 @@ if [[ $SKIP_KAFKA_CHECK != true ]]; then WAIT_FOR_KAFKA=" -wait tcp://$(echo $KAFKA_BOOTSTRAP_SERVER | sed 's/,/ -wait tcp:\/\//g') " fi 
+WAIT_FOR_SCHEMA_REGISTRY="" +if [[ "$KAFKA_SCHEMAREGISTRY_URL" && $SKIP_SCHEMA_REGISTRY_CHECK != true ]]; then + WAIT_FOR_SCHEMA_REGISTRY="-wait $KAFKA_SCHEMAREGISTRY_URL" +fi + OTEL_AGENT="" if [[ $ENABLE_OTEL == true ]]; then OTEL_AGENT="-javaagent:opentelemetry-javaagent.jar " @@ -17,5 +22,6 @@ fi exec dockerize \ $WAIT_FOR_KAFKA \ + $WAIT_FOR_SCHEMA_REGISTRY \ -timeout 240s \ - java $JAVA_OPTS $JMX_OPTS $OTEL_AGENT $PROMETHEUS_AGENT -jar /datahub/datahub-mce-consumer/bin/mce-consumer-job.jar \ No newline at end of file + java $JAVA_OPTS $JMX_OPTS $OTEL_AGENT $PROMETHEUS_AGENT -jar /datahub/datahub-mce-consumer/bin/mce-consumer-job.jar diff --git a/docker/datahub-upgrade/Dockerfile b/docker/datahub-upgrade/Dockerfile index 2beb5b54dac383..cda13378be68ef 100644 --- a/docker/datahub-upgrade/Dockerfile +++ b/docker/datahub-upgrade/Dockerfile @@ -38,13 +38,13 @@ ENV JMX_VERSION=0.18.0 # PFP-260: Upgrade Sqlite to >=3.28.0-r0 to fix https://security.snyk.io/vuln/SNYK-ALPINE39-SQLITE-449762 RUN apk --no-cache --update-cache --available upgrade \ && apk --no-cache add curl bash coreutils gcompat sqlite libc6-compat java-snappy \ - && apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/9.4.46.v20220331/jetty-runner-9.4.46.v20220331.jar --output jetty-runner.jar \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/9.4.46.v20220331/jetty-jmx-9.4.46.v20220331.jar --output jetty-jmx.jar \ - && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/9.4.46.v20220331/jetty-util-9.4.46.v20220331.jar --output jetty-util.jar \ + && apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-runner/11.0.19/jetty-runner-11.0.19.jar --output jetty-runner.jar \ + && curl -sS 
${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-jmx/11.0.19/jetty-jmx-11.0.19.jar --output jetty-jmx.jar \ + && curl -sS ${MAVEN_CENTRAL_REPO_URL}/org/eclipse/jetty/jetty-util/11.0.19/jetty-util-11.0.19.jar --output jetty-util.jar \ && wget --no-verbose ${GITHUB_REPO_URL}/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.24.0/opentelemetry-javaagent.jar \ && wget --no-verbose ${MAVEN_CENTRAL_REPO_URL}/io/prometheus/jmx/jmx_prometheus_javaagent/${JMX_VERSION}/jmx_prometheus_javaagent-${JMX_VERSION}.jar -O jmx_prometheus_javaagent.jar \ - && cp /usr/lib/jvm/java-11-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks + && cp /usr/lib/jvm/java-17-openjdk/jre/lib/security/cacerts /tmp/kafka.client.truststore.jks COPY --from=binary /go/bin/dockerize /usr/local/bin ENV LD_LIBRARY_PATH="/lib:/lib64" diff --git a/docker/datahub-upgrade/README.md b/docker/datahub-upgrade/README.md index 0d019971604d6b..9c96114cdb2dd9 100644 --- a/docker/datahub-upgrade/README.md +++ b/docker/datahub-upgrade/README.md @@ -15,8 +15,16 @@ to metadata_aspect_v2 table. Arguments: 2. **NoCodeDataMigrationCleanup**: Cleanses graph index, search index, and key-value store of legacy DataHub data (metadata_aspect table) once the No Code Data Migration has completed successfully. No arguments. -3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE - +3. **RestoreIndices**: Restores indices by fetching the latest version of each aspect and producing MAE. Arguments: + - *batchSize* (Optional): The number of rows to migrate at a time. Defaults to 1000. + - *batchDelayMs* (Optional): The number of milliseconds of delay between migrated batches. Used for rate limiting. Defaults to 250. + - *numThreads* (Optional): The number of threads to use, defaults to 1. Note that this is not used if `urnBasedPagination` is true. + - *aspectName* (Optional): The aspect name for producing events. 
+ - *urn* (Optional): The urn for producing events. + - *urnLike* (Optional): The urn pattern for producing events, using `%` as a wild card + - *urnBasedPagination* (Optional): Paginate the SQL results using the urn + aspect string instead of `OFFSET`. Defaults to false, + though should improve performance for large amounts of data. + 4. **RestoreBackup**: Restores the storage stack from a backup of the local database ## Environment Variables diff --git a/docker/dev-with-cassandra.sh b/docker/dev-with-cassandra.sh index f71d91de190807..6f9cf6b88e8607 100755 --- a/docker/dev-with-cassandra.sh +++ b/docker/dev-with-cassandra.sh @@ -23,13 +23,13 @@ fi # YOU MUST BUILD VIA GRADLE BEFORE RUNNING THIS. DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR && \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose \ -f docker-compose-with-cassandra.yml \ -f docker-compose.dev.yml \ $CONSUMERS_COMPOSE $MONITORING_COMPOSE $M1_COMPOSE \ pull \ && \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose -p datahub \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose -p datahub \ -f docker-compose-with-cassandra.yml \ -f docker-compose.dev.yml \ $CONSUMERS_COMPOSE $MONITORING_COMPOSE $M1_COMPOSE \ diff --git a/docker/dev-without-neo4j.sh b/docker/dev-without-neo4j.sh index 07e51840bece76..78a8f4e1161be8 100755 --- a/docker/dev-without-neo4j.sh +++ b/docker/dev-without-neo4j.sh @@ -23,13 +23,13 @@ fi # Launches dev instances of DataHub images. See documentation for more details. # YOU MUST BUILD VIA GRADLE BEFORE RUNNING THIS. cd "${DIR}/../.." 
&& \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose \ -f "${DIR}/docker-compose-without-neo4j.yml" \ -f "${DIR}/docker-compose-without-neo4j.override.yml" \ -f "${DIR}/docker-compose.dev.yml" \ $CONSUMERS_COMPOSE $MONITORING_COMPOSE $M1_COMPOSE pull \ && \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose -p datahub \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose -p datahub \ -f "${DIR}/docker-compose-without-neo4j.yml" \ -f "${DIR}/docker-compose-without-neo4j.override.yml" \ -f "${DIR}/docker-compose.dev.yml" \ diff --git a/docker/dev.sh b/docker/dev.sh index 9f7fafdaf3d5ec..86f58a416daf70 100755 --- a/docker/dev.sh +++ b/docker/dev.sh @@ -23,13 +23,13 @@ fi # YOU MUST BUILD VIA GRADLE BEFORE RUNNING THIS. DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR && \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose \ -f docker-compose.yml \ -f docker-compose.override.yml \ -f docker-compose.dev.yml \ $CONSUMERS_COMPOSE $MONITORING_COMPOSE $M1_COMPOSE pull \ && \ - COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose -p datahub \ + COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose -p datahub \ -f docker-compose.yml \ -f docker-compose.override.yml \ -f docker-compose.dev.yml \ diff --git a/docker/docker-compose-with-cassandra.yml b/docker/docker-compose-with-cassandra.yml index 48239fcd87831e..d722b07b9a7af4 100644 --- a/docker/docker-compose-with-cassandra.yml +++ b/docker/docker-compose-with-cassandra.yml @@ -7,9 +7,8 @@ version: '3.9' 
services: datahub-frontend-react: - container_name: datahub-frontend-react hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - 9002:9002 build: @@ -22,7 +21,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-actions: - container_name: datahub-actions hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} env_file: datahub-actions/env/docker.env @@ -33,9 +31,8 @@ services: datahub-gms: condition: service_healthy datahub-gms: - container_name: datahub-gms hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - 8080:8080 build: @@ -54,7 +51,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: - container_name: datahub-upgrade hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} command: @@ -76,7 +72,6 @@ services: schema-registry: condition: service_healthy cassandra-setup: - container_name: cassandra-setup hostname: cassandra-setup image: cassandra:3.11 command: /bin/bash -c "cqlsh cassandra -f /init.cql" @@ -89,9 +84,8 @@ services: datahub_setup_job: true # This "container" is a workaround to pre-create search indices elasticsearch-setup: - container_name: elasticsearch-setup hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile @@ -105,7 +99,6 @@ services: labels: datahub_setup_job: true cassandra: - container_name: cassandra hostname: cassandra 
image: cassandra:3.11 ports: @@ -118,7 +111,6 @@ services: volumes: - cassandradata:/var/lib/cassandra elasticsearch: - container_name: elasticsearch hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} ports: @@ -136,7 +128,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data neo4j: - container_name: neo4j hostname: neo4j image: neo4j:4.0.6 ports: @@ -152,7 +143,6 @@ services: volumes: - neo4jdata:/data schema-registry: - container_name: schema-registry hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 ports: @@ -168,7 +158,6 @@ services: broker: condition: service_healthy broker: - container_name: broker hostname: broker image: confluentinc/cp-kafka:7.4.0 ports: @@ -187,7 +176,6 @@ services: volumes: - broker:/var/lib/kafka/data/ zookeeper: - container_name: zookeeper hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 ports: diff --git a/docker/docker-compose-without-neo4j.override.yml b/docker/docker-compose-without-neo4j.override.yml index 36f3c974b93afc..37ae41e383e7c5 100644 --- a/docker/docker-compose-without-neo4j.override.yml +++ b/docker/docker-compose-without-neo4j.override.yml @@ -12,7 +12,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: - container_name: datahub-upgrade hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} command: @@ -30,7 +29,6 @@ services: kafka-setup: condition: service_completed_successfully mysql-setup: - container_name: mysql-setup hostname: mysql-setup image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} build: @@ -46,9 +44,8 @@ services: environment: - DATAHUB_PRECREATE_TOPICS=${DATAHUB_PRECREATE_TOPICS:-false} mysql: - container_name: mysql hostname: mysql - image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} command: --character-set-server=utf8mb4 
--collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 diff --git a/docker/docker-compose-without-neo4j.postgres.override.yml b/docker/docker-compose-without-neo4j.postgres.override.yml index 369b5a155fc36b..dd7590ffe09b9d 100644 --- a/docker/docker-compose-without-neo4j.postgres.override.yml +++ b/docker/docker-compose-without-neo4j.postgres.override.yml @@ -16,7 +16,6 @@ services: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: - container_name: datahub-upgrade hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} command: @@ -37,7 +36,6 @@ services: condition: service_completed_successfully postgres-setup: - container_name: postgres-setup hostname: postgres-setup image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-acryldata/datahub-postgres-setup}:${DATAHUB_VERSION:-head} build: @@ -51,7 +49,6 @@ services: datahub_setup_job: true postgres: - container_name: postgres hostname: postgres image: postgres:${DATAHUB_POSTGRES_VERSION:-12.3} env_file: postgres/env/docker.env diff --git a/docker/docker-compose-without-neo4j.yml b/docker/docker-compose-without-neo4j.yml index 6191994eaa1ea5..eae36fb849fd5c 100644 --- a/docker/docker-compose-without-neo4j.yml +++ b/docker/docker-compose-without-neo4j.yml @@ -7,9 +7,8 @@ version: '3.9' services: datahub-frontend-react: - container_name: datahub-frontend-react hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 build: @@ -23,7 +22,6 @@ services: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-actions: - container_name: datahub-actions hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} env_file: 
datahub-actions/env/docker.env @@ -34,15 +32,16 @@ services: datahub-gms: condition: service_healthy datahub-gms: - container_name: datahub-gms hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 build: context: ../ dockerfile: docker/datahub-gms/Dockerfile env_file: datahub-gms/env/docker-without-neo4j.env + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} healthcheck: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health start_period: 90s @@ -55,7 +54,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: - container_name: datahub-upgrade hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} command: @@ -74,9 +72,8 @@ services: datahub_setup_job: true # This "container" is a workaround to pre-create search indices elasticsearch-setup: - container_name: elasticsearch-setup hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile @@ -90,9 +87,8 @@ services: labels: datahub_setup_job: true kafka-setup: - container_name: kafka-setup hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} build: dockerfile: ./docker/kafka-setup/Dockerfile context: ../ @@ -105,7 +101,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch hostname: elasticsearch image: 
${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} ports: @@ -127,7 +122,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data schema-registry: - container_name: schema-registry hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 ports: @@ -143,7 +137,6 @@ services: broker: condition: service_healthy broker: - container_name: broker hostname: broker image: confluentinc/cp-kafka:7.4.0 ports: @@ -161,7 +154,6 @@ services: volumes: - broker:/var/lib/kafka/data/ zookeeper: - container_name: zookeeper hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 ports: diff --git a/docker/docker-compose.consumers-without-neo4j.yml b/docker/docker-compose.consumers-without-neo4j.yml index 8228951d9385f8..f1aa6b30cede09 100644 --- a/docker/docker-compose.consumers-without-neo4j.yml +++ b/docker/docker-compose.consumers-without-neo4j.yml @@ -6,19 +6,20 @@ services: - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: - container_name: datahub-mae-consumer hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 build: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile env_file: datahub-mae-consumer/env/docker-without-neo4j.env + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} datahub-mce-consumer: - container_name: datahub-mce-consumer hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 build: @@ -28,3 +29,5 @@ services: environment: - 
DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_TELEMETRY_ENABLED=${DATAHUB_TELEMETRY_ENABLED:-true} + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} diff --git a/docker/docker-compose.consumers.dev.yml b/docker/docker-compose.consumers.dev.yml index 5c272a9cf9b8ae..00f7b52df151f3 100644 --- a/docker/docker-compose.consumers.dev.yml +++ b/docker/docker-compose.consumers.dev.yml @@ -1,7 +1,7 @@ version: '3.9' services: datahub-mae-consumer: - image: linkedin/datahub-mae-consumer:debug + image: acryldata/datahub-mae-consumer:debug build: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile @@ -13,7 +13,7 @@ services: - ../metadata-jobs/mae-consumer-job/build/libs/:/datahub/datahub-mae-consumer/bin/ - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml datahub-mce-consumer: - image: linkedin/datahub-mce-consumer:debug + image: acryldata/datahub-mce-consumer:debug build: context: ../ dockerfile: docker/datahub-mce-consumer/Dockerfile diff --git a/docker/docker-compose.consumers.yml b/docker/docker-compose.consumers.yml index 2d37094035859b..74b9adaeb99485 100644 --- a/docker/docker-compose.consumers.yml +++ b/docker/docker-compose.consumers.yml @@ -6,22 +6,23 @@ services: - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: - container_name: datahub-mae-consumer hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 build: context: ../ dockerfile: docker/datahub-mae-consumer/Dockerfile env_file: datahub-mae-consumer/env/docker.env + environment: + - 
KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy datahub-mce-consumer: - container_name: datahub-mce-consumer hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 build: @@ -36,6 +37,8 @@ services: - NEO4J_USERNAME=neo4j - NEO4J_PASSWORD=datahub - GRAPH_SERVICE_IMPL=neo4j + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} depends_on: neo4j: condition: service_healthy diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index 774c4e17bee21f..b6ac43a9eda434 100644 --- a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -11,7 +11,7 @@ version: '3.9' services: datahub-frontend-react: - image: linkedin/datahub-frontend-react:debug + image: acryldata/datahub-frontend-react:head ports: - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 @@ -24,9 +24,10 @@ services: - JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 - DATAHUB_ANALYTICS_ENABLED=${DATAHUB_ANALYTICS_ENABLED:-true} volumes: - - ../datahub-frontend/build/stage/playBinary:/datahub-frontend + - ../datahub-frontend/build/stage/main:/datahub-frontend + - ./monitoring/client-prometheus-config.yaml:/datahub-frontend/client-prometheus-config.yaml datahub-gms: - image: linkedin/datahub-gms:debug + image: acryldata/datahub-gms:debug ports: - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 @@ -45,6 +46,8 @@ services: - SEARCH_SERVICE_ENABLE_CACHE=false - 
LINEAGE_SEARCH_CACHE_ENABLED=false - SHOW_BROWSE_V2=true + - ER_MODEL_RELATIONSHIP_FEATURE_ENABLED=false + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} volumes: - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml @@ -73,7 +76,7 @@ services: - ${HOME}/.datahub/plugins:/etc/datahub/plugins # Pre-creates the search indices using local mapping/settings.json elasticsearch-setup: - image: linkedin/datahub-elasticsearch-setup:debug + image: acryldata/datahub-elasticsearch-setup:head build: context: elasticsearch-setup dockerfile: Dockerfile diff --git a/docker/docker-compose.override.yml b/docker/docker-compose.override.yml index ef13b86a3d1514..d443a3f4629dfd 100644 --- a/docker/docker-compose.override.yml +++ b/docker/docker-compose.override.yml @@ -14,7 +14,6 @@ services: environment: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} mysql-setup: - container_name: mysql-setup hostname: mysql-setup image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head} build: @@ -30,9 +29,8 @@ services: environment: - DATAHUB_PRECREATE_TOPICS=${DATAHUB_PRECREATE_TOPICS:-false} mysql: - container_name: mysql hostname: mysql - image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 diff --git a/docker/docker-compose.tools.yml b/docker/docker-compose.tools.yml index 99e36d8ba4511c..8d2c30c64e6c8c 100644 --- a/docker/docker-compose.tools.yml +++ b/docker/docker-compose.tools.yml @@ -6,7 +6,6 @@ services: image: confluentinc/cp-kafka-rest:7.4.0 env_file: kafka-rest-proxy/env/docker.env hostname: kafka-rest-proxy - container_name: kafka-rest-proxy ports: - "8082:8082" depends_on: @@ -18,7 +17,6 @@ services: image: 
landoop/kafka-topics-ui:0.9.4 env_file: kafka-topics-ui/env/docker.env hostname: kafka-topics-ui - container_name: kafka-topics-ui ports: - "18000:8000" depends_on: @@ -30,7 +28,6 @@ services: kibana: image: kibana:7.10.1 env_file: kibana/env/docker.env - container_name: kibana hostname: kibana ports: - "5601:5601" diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 95f56fe47e3cca..96f37496859a46 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -7,9 +7,8 @@ version: '3.9' services: datahub-frontend-react: - container_name: datahub-frontend-react hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 build: @@ -22,7 +21,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-actions: - container_name: datahub-actions hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} env_file: datahub-actions/env/docker.env @@ -33,9 +31,10 @@ services: datahub-gms: condition: service_healthy datahub-gms: - container_name: datahub-gms hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} + environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 build: @@ -53,7 +52,6 @@ services: volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-upgrade: - container_name: datahub-upgrade hostname: datahub-upgrade image: ${DATAHUB_UPGRADE_IMAGE:-acryldata/datahub-upgrade}:${DATAHUB_VERSION:-head} command: @@ -76,9 +74,8 @@ services: condition: service_healthy # This "container" is a workaround to pre-create search indices 
elasticsearch-setup: - container_name: elasticsearch-setup hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} build: context: ../ dockerfile: docker/elasticsearch-setup/Dockerfile @@ -95,9 +92,8 @@ services: # This is not required in most cases, kept here for backwards compatibility with older clients that # explicitly wait for this container kafka-setup: - container_name: kafka-setup hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} build: dockerfile: ./docker/kafka-setup/Dockerfile context: ../ @@ -110,7 +106,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch hostname: elasticsearch image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} ports: @@ -132,7 +127,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data neo4j: - container_name: neo4j hostname: neo4j image: neo4j:4.4.9-community ports: @@ -148,7 +142,6 @@ services: volumes: - neo4jdata:/data schema-registry: - container_name: schema-registry hostname: schema-registry image: confluentinc/cp-schema-registry:7.4.0 ports: @@ -164,7 +157,6 @@ services: broker: condition: service_healthy broker: - container_name: broker hostname: broker image: confluentinc/cp-kafka:7.4.0 ports: @@ -182,7 +174,6 @@ services: volumes: - broker:/var/lib/kafka/data/ zookeeper: - container_name: zookeeper hostname: zookeeper image: confluentinc/cp-zookeeper:7.4.0 ports: diff --git a/docker/elasticsearch-setup/Dockerfile b/docker/elasticsearch-setup/Dockerfile index ea64f94f887276..fdaf9ddbaf813b 100644 --- a/docker/elasticsearch-setup/Dockerfile +++ b/docker/elasticsearch-setup/Dockerfile @@ -44,9 +44,9 @@ FROM base AS 
dev-install # See this excellent thread https://github.com/docker/cli/issues/1134 FROM ${APP_ENV}-install AS final + CMD if [ "$ELASTICSEARCH_USE_SSL" == "true" ]; then ELASTICSEARCH_PROTOCOL=https; else ELASTICSEARCH_PROTOCOL=http; fi \ && if [[ -n "$ELASTICSEARCH_USERNAME" ]]; then ELASTICSEARCH_HTTP_HEADERS="Authorization: Basic $(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64)"; else ELASTICSEARCH_HTTP_HEADERS="Accept: */*"; fi \ && if [[ "$SKIP_ELASTICSEARCH_CHECK" != "true" ]]; then \ dockerize -wait $ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT -wait-http-header "${ELASTICSEARCH_HTTP_HEADERS}" -timeout 120s /create-indices.sh; \ else /create-indices.sh; fi - diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh index 81cf405bf4b3dc..d0a1bfbaed0f78 100755 --- a/docker/elasticsearch-setup/create-indices.sh +++ b/docker/elasticsearch-setup/create-indices.sh @@ -103,6 +103,36 @@ function create_if_not_exists { fi } +# Update ISM policy. Non-fatal if policy cannot be updated. +function update_ism_policy { + RESOURCE_ADDRESS="$1" + RESOURCE_DEFINITION_NAME="$2" + + TMP_CURRENT_POLICY_PATH="/tmp/current-$RESOURCE_DEFINITION_NAME" + + # Get existing policy + RESOURCE_STATUS=$(curl "${CURL_ARGS[@]}" -o $TMP_CURRENT_POLICY_PATH -w "%{http_code}\n" "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS") + echo -e "\n>>> GET $RESOURCE_ADDRESS response code is $RESOURCE_STATUS" + + if [ $RESOURCE_STATUS -ne 200 ]; then + echo -e ">>> Could not get ISM policy $RESOURCE_ADDRESS. Ignoring." 
+ return + fi + + SEQ_NO=$(cat $TMP_CURRENT_POLICY_PATH | jq -r '._seq_no') + PRIMARY_TERM=$(cat $TMP_CURRENT_POLICY_PATH | jq -r '._primary_term') + + TMP_NEW_RESPONSE_PATH="/tmp/response-$RESOURCE_DEFINITION_NAME" + TMP_NEW_POLICY_PATH="/tmp/new-$RESOURCE_DEFINITION_NAME" + sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" \ + | sed -e "s/DUE_SHARDS/$DUE_SHARDS/g" \ + | sed -e "s/DUE_REPLICAS/$DUE_REPLICAS/g" \ + | tee -a "$TMP_NEW_POLICY_PATH" + RESOURCE_STATUS=$(curl "${CURL_ARGS[@]}" -XPUT "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS?if_seq_no=$SEQ_NO&if_primary_term=$PRIMARY_TERM" \ + -H 'Content-Type: application/json' -w "%{http_code}\n" -o $TMP_NEW_RESPONSE_PATH --data "@$TMP_NEW_POLICY_PATH") + echo -e "\n>>> PUT $RESOURCE_ADDRESS response code is $RESOURCE_STATUS" +} + # create indices for ES (non-AWS) function create_datahub_usage_event_datastream() { # non-AWS env requires creation of three resources for Datahub usage events: @@ -120,6 +150,11 @@ function create_datahub_usage_event_aws_elasticsearch() { # 1. ISM policy create_if_not_exists "_opendistro/_ism/policies/${PREFIX}datahub_usage_event_policy" aws_es_ism_policy.json + # 1.1 ISM policy update if it already existed + if [ $RESOURCE_STATUS -eq 200 ]; then + update_ism_policy "_opendistro/_ism/policies/${PREFIX}datahub_usage_event_policy" aws_es_ism_policy.json + fi + # 2. 
index template create_if_not_exists "_template/${PREFIX}datahub_usage_event_index_template" aws_es_index_template.json @@ -165,4 +200,4 @@ else elif [ $DATAHUB_USAGE_EVENT_INDEX_RESPONSE_CODE -eq 403 ]; then echo -e "Forbidden so exiting" fi -fi \ No newline at end of file +fi diff --git a/docker/ingestion/docker-compose.yml b/docker/ingestion/docker-compose.yml index 4abcdc34c0302f..06d4e47aa4a404 100644 --- a/docker/ingestion/docker-compose.yml +++ b/docker/ingestion/docker-compose.yml @@ -5,9 +5,8 @@ services: build: context: ../../ dockerfile: docker/datahub-ingestion/Dockerfile - image: linkedin/datahub-ingestion:${DATAHUB_VERSION:-head} + image: acryldata/datahub-ingestion:${DATAHUB_VERSION:-head} hostname: ingestion - container_name: ingestion command: "ingest -c /sample_recipe.yml" volumes: # Most of the config is embedded inside the sample recipe file. diff --git a/docker/ingestion/ingestion.sh b/docker/ingestion/ingestion.sh index 6d88b395d759b9..8fa7b8c565ff0d 100755 --- a/docker/ingestion/ingestion.sh +++ b/docker/ingestion/ingestion.sh @@ -2,4 +2,4 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" export DATAHUB_VERSION=${DATAHUB_VERSION:-head} -cd $DIR && docker-compose pull && docker-compose -p datahub up +cd $DIR && docker compose pull && docker compose -p datahub up diff --git a/docker/kafka-setup/Dockerfile b/docker/kafka-setup/Dockerfile index f6a4b62a793562..7265e35a85cab0 100644 --- a/docker/kafka-setup/Dockerfile +++ b/docker/kafka-setup/Dockerfile @@ -22,7 +22,7 @@ ARG ALPINE_REPO_URL ARG APACHE_DOWNLOAD_URL ARG GITHUB_REPO_URL -ENV KAFKA_VERSION 3.4.1 +ENV KAFKA_VERSION 3.5.2 ENV SCALA_VERSION 2.13 LABEL name="kafka" version=${KAFKA_VERSION} @@ -31,7 +31,7 @@ LABEL name="kafka" version=${KAFKA_VERSION} RUN if [ "${ALPINE_REPO_URL}" != "http://dl-cdn.alpinelinux.org/alpine" ] ; then sed -i "s#http.*://dl-cdn.alpinelinux.org/alpine#${ALPINE_REPO_URL}#g" /etc/apk/repositories ; fi RUN apk add --no-cache bash coreutils 
-RUN apk --no-cache add openjdk11-jre-headless --repository=${ALPINE_REPO_URL}/edge/community +RUN apk --no-cache add openjdk17-jre-headless --repository=${ALPINE_REPO_URL}/edge/community RUN apk add --no-cache -t .build-deps git curl ca-certificates jq gcc musl-dev libffi-dev zip RUN mkdir -p /opt \ @@ -52,8 +52,8 @@ RUN ls -la COPY --from=confluent_base /usr/share/java/cp-base-new/ /usr/share/java/cp-base-new/ COPY --from=confluent_base /etc/cp-base-new/log4j.properties /etc/cp-base-new/log4j.properties -ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /usr/share/java/cp-base-new -ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v1.1.6/aws-msk-iam-auth-1.1.6-all.jar /opt/kafka/libs +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v2.0.3/aws-msk-iam-auth-2.0.3-all.jar /usr/share/java/cp-base-new +ADD --chown=kafka:kafka ${GITHUB_REPO_URL}/aws/aws-msk-iam-auth/releases/download/v2.0.3/aws-msk-iam-auth-2.0.3-all.jar /opt/kafka/libs ENV METADATA_AUDIT_EVENT_NAME="MetadataAuditEvent_v4" ENV METADATA_CHANGE_EVENT_NAME="MetadataChangeEvent_v4" diff --git a/docker/mariadb/docker-compose.mariadb.yml b/docker/mariadb/docker-compose.mariadb.yml index 5f9d1220832565..8a3a917c64b5c9 100644 --- a/docker/mariadb/docker-compose.mariadb.yml +++ b/docker/mariadb/docker-compose.mariadb.yml @@ -3,7 +3,6 @@ version: '3.8' services: mariadb: - container_name: mariadb hostname: mariadb image: mariadb:10.5 env_file: env/docker.env diff --git a/docker/monitoring/docker-compose.monitoring.yml b/docker/monitoring/docker-compose.monitoring.yml index 8265b5341a02d2..c6fa019cf99fcd 100644 --- a/docker/monitoring/docker-compose.monitoring.yml +++ b/docker/monitoring/docker-compose.monitoring.yml @@ -38,7 +38,6 @@ services: - '14268' - '14250' prometheus: - container_name: prometheus image: prom/prometheus:latest volumes: - 
./monitoring/prometheus.yaml:/etc/prometheus/prometheus.yml diff --git a/docker/mysql-setup/init.sql b/docker/mysql-setup/init.sql index b6a1d47fb2a022..b74761ebc488a4 100644 --- a/docker/mysql-setup/init.sql +++ b/docker/mysql-setup/init.sql @@ -14,7 +14,7 @@ create table if not exists metadata_aspect_v2 ( createdfor varchar(255), constraint pk_metadata_aspect_v2 primary key (urn,aspect,version), INDEX timeIndex (createdon) -); +) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin; -- create default records for datahub user if not exists DROP TABLE if exists temp_metadata_aspect_v2; diff --git a/docker/mysql/docker-compose.mysql.yml b/docker/mysql/docker-compose.mysql.yml index 853d0c425ea61a..d8c7767985000f 100644 --- a/docker/mysql/docker-compose.mysql.yml +++ b/docker/mysql/docker-compose.mysql.yml @@ -3,9 +3,8 @@ version: '3.8' services: mysql: - container_name: mysql hostname: mysql - image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} env_file: env/docker.env command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin ports: diff --git a/docker/nuke.sh b/docker/nuke.sh index 875b739e9f48d1..364773b77b10e4 100755 --- a/docker/nuke.sh +++ b/docker/nuke.sh @@ -4,8 +4,8 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR # Tear down and clean up all DataHub-related containers, volumes, and network -docker-compose -p datahub down -v -docker-compose rm -f -v +docker compose -p datahub down -v +docker compose rm -f -v # Tear down ingestion container -(cd ingestion && docker-compose -p datahub down -v) +(cd ingestion && docker compose -p datahub down -v) diff --git a/docker/postgres-setup/init.sql b/docker/postgres-setup/init.sql index 72b2f73192e00f..f2bda8b2e1dd9f 100644 --- a/docker/postgres-setup/init.sql +++ b/docker/postgres-setup/init.sql @@ -14,7 +14,7 @@ CREATE TABLE IF NOT EXISTS metadata_aspect_v2 ( create index timeIndex ON metadata_aspect_v2 (createdon); -- create default records for 
datahub user if not exists -CREATE TEMP TABLE temp_metadata_aspect_v2 AS TABLE metadata_aspect_v2; +CREATE TEMP TABLE temp_metadata_aspect_v2 AS TABLE metadata_aspect_v2 WITH NO DATA; INSERT INTO temp_metadata_aspect_v2 (urn, aspect, version, metadata, createdon, createdby) VALUES( 'urn:li:corpuser:datahub', 'corpUserInfo', diff --git a/docker/profiles/README.md b/docker/profiles/README.md new file mode 100644 index 00000000000000..fb3c9e3c84a7a2 --- /dev/null +++ b/docker/profiles/README.md @@ -0,0 +1,104 @@ +# Docker Compose Profiles + +This directory contains a set of docker compose definitions which are designed to run several configurations +for quickstart use-cases as well as development use-cases. These configurations cover a few of the wide variety of +infrastructure configurations that DataHub can operate on. + +Requirements: +* Using profiles requires docker compose >= 2.20. +* If using the debug/development profiles, you will need to have built the `debug` docker images locally. See the Development Profiles section for more details. + +```bash +$ cd docker/profiles +$ docker compose --profile <profile name> up +``` + +Use Control-c (`^c`) to terminate the running system. This will automatically stop all running containers. + +To remove the containers use the following: + +```bash +docker compose --profile <profile name> rm +``` + +Please refer to docker's documentation for more details. + +The following sections detail a few of the profiles and their intended use-cases. For a complete list of profiles +and their configuration please see the table at the end of each section. + +## Quickstart Profiles + +Quickstart profiles are primarily a way to test drive DataHub features before committing to a production ready deployment. +A couple of these profiles are also used in our continuous integration (CI) tests. + +Note: Quickstart profiles use docker images with the `head` tag. These images are updated when changes are committed +to the DataHub GitHub repository. 
This can be overridden to use a stable release tag by prefixing the commands with +`DATAHUB_VERSION=v0.12.1` for example. + +### `quickstart` + +This is the default configuration, using MySQL and OpenSearch for the storage and GMS running with integrated consumers. + +### `quickstart-consumers` + +This configuration is identical to `quickstart` except that it runs standalone consumers instead of consumers integrated with the GMS container. + +### `quickstart-postgres` + +Identical to `quickstart` with Postgres instead of MySQL. + +### `quickstart-cassandra` + +Uses Cassandra as the primary data store along with Neo4j as the graph database. + +### `quickstart-storage` + +Just run the `quickstart` data stores without the DataHub components. This mode is useful for debugging when running the frontend and GMS components outside +of docker. + +### Quickstart Profiles Table +| Profile Name | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | +|----------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------| +| quickstart | X | | | | X | X | X | X | | | X | X | +| quickstart-frontend | X | | | | X | | | X | | | X | X | +| quickstart-backend | X | | | | | X | X | X | | | X | X | +| quickstart-postgres | | X | | | X | X | X | X | | | X | X | +| quickstart-cassandra | | | X | X | X | X | X | X | | | X | X | +| quickstart-consumers | X | | | | X | X | X | X | X | X | X | X | +| quickstart-storage | X | | | | | | | | | | X | X | + +## Development Profiles + +* Runs `debug` tagged images +* JVM Debug Mode Enabled +* Exposes local jars and scripts to the containers +* Can run non-default one-off configurations (neo4j, cassandra, elasticsearch) + +The docker images used are the `debug` images which are created by building locally. These images are +created by running the gradle command. 
+ +```bash +./gradlew dockerTagDebug +``` + +For a complete list of profiles see the table at the end of this section. + +### `quickstart-backend` + +Run everything except for the `frontend` component. Useful for running just a local (non-docker) frontend. + +### `quickstart-frontend` + +Runs everything except for the GMS. Useful for running just a local (non-docker) GMS instance. + +### Development Profiles Table +| Profile Name | MySQL | Postgres | Cassandra | Neo4j | Frontend | GMS | Actions | SystemUpdate | MAE | MCE | Kafka | OpenSearch | Elasticsearch | +|---------------------|-------|----------|-----------|-------|----------|-----|---------|--------------|-----|-----|-------|------------|---------------| +| debug | X | | | | X | X | X | X | | | X | X | | +| debug-frontend | X | | | | X | | | X | | | X | X | | +| debug-backend | X | | | | | X | X | X | | | X | X | | +| debug-postgres | | X | | | X | X | X | X | | | X | X | | +| debug-cassandra | | | X | | X | X | X | X | | | X | X | | +| debug-consumers | X | | | | X | X | X | X | X | X | X | X | | +| debug-neo4j | X | | | X | X | X | X | X | | | X | X | | +| debug-elasticsearch | X | | | | X | X | X | X | | | X | | X | \ No newline at end of file diff --git a/docker/profiles/cassandra b/docker/profiles/cassandra new file mode 120000 index 00000000000000..d9af9adbce5cad --- /dev/null +++ b/docker/profiles/cassandra @@ -0,0 +1 @@ +../cassandra \ No newline at end of file diff --git a/docker/profiles/datahub-actions b/docker/profiles/datahub-actions new file mode 120000 index 00000000000000..fea4275be45ffc --- /dev/null +++ b/docker/profiles/datahub-actions @@ -0,0 +1 @@ +../datahub-actions/ \ No newline at end of file diff --git a/docker/profiles/datahub-frontend b/docker/profiles/datahub-frontend new file mode 120000 index 00000000000000..74a18b81b7e3b8 --- /dev/null +++ b/docker/profiles/datahub-frontend @@ -0,0 +1 @@ +../datahub-frontend \ No newline at end of file diff --git a/docker/profiles/datahub-gms 
b/docker/profiles/datahub-gms new file mode 120000 index 00000000000000..de2f067e4c0e0d --- /dev/null +++ b/docker/profiles/datahub-gms @@ -0,0 +1 @@ +../datahub-gms \ No newline at end of file diff --git a/docker/profiles/datahub-mae-consumer b/docker/profiles/datahub-mae-consumer new file mode 120000 index 00000000000000..90974047792c50 --- /dev/null +++ b/docker/profiles/datahub-mae-consumer @@ -0,0 +1 @@ +../datahub-mae-consumer \ No newline at end of file diff --git a/docker/profiles/datahub-mce-consumer b/docker/profiles/datahub-mce-consumer new file mode 120000 index 00000000000000..288c9d91c28b3e --- /dev/null +++ b/docker/profiles/datahub-mce-consumer @@ -0,0 +1 @@ +../datahub-mce-consumer \ No newline at end of file diff --git a/docker/profiles/datahub-upgrade b/docker/profiles/datahub-upgrade new file mode 120000 index 00000000000000..8ff77fd5562e7f --- /dev/null +++ b/docker/profiles/datahub-upgrade @@ -0,0 +1 @@ +../datahub-upgrade \ No newline at end of file diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml new file mode 100644 index 00000000000000..45c1d928443f8e --- /dev/null +++ b/docker/profiles/docker-compose.actions.yml @@ -0,0 +1,90 @@ + +x-datahub-actions-service: &datahub-actions-service + hostname: actions + image: ${DATAHUB_ACTIONS_IMAGE:-${DATAHUB_ACTIONS_REPO:-acryldata}/datahub-actions}:${ACTIONS_VERSION:-v0.0.14} + env_file: + - datahub-actions/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-actions/env/docker.env} + - ${DATAHUB_LOCAL_ACTIONS_ENV:-datahub-actions/env/docker.env} + environment: + ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} + ACTIONS_CONFIG: ${ACTIONS_CONFIG:-} + KAFKA_BOOTSTRAP_SERVER: broker:29092 + SCHEMA_REGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +x-datahub-actions-service-dev: &datahub-actions-service-dev + <<: *datahub-actions-service + +services: + datahub-actions-quickstart: + <<: *datahub-actions-service + profiles: + - quickstart 
+ - quickstart-backend + depends_on: + datahub-gms-quickstart: + condition: service_healthy + datahub-actions-quickstart-cassandra: + <<: *datahub-actions-service + profiles: + - quickstart-cassandra + depends_on: + datahub-gms-quickstart-cassandra: + condition: service_healthy + datahub-actions-quickstart-postgres: + <<: *datahub-actions-service + profiles: + - quickstart-postgres + depends_on: + datahub-gms-quickstart-postgres: + condition: service_healthy + datahub-actions-quickstart-consumers: + <<: *datahub-actions-service + profiles: + - quickstart-consumers + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-actions-debug: + <<: *datahub-actions-service-dev + profiles: + - debug + - debug-backend + depends_on: + datahub-gms-debug: + condition: service_healthy + datahub-actions-debug-postgres: + <<: *datahub-actions-service-dev + profiles: + - debug-postgres + depends_on: + datahub-gms-debug-postgres: + condition: service_healthy + datahub-actions-debug-cassandra: + <<: *datahub-actions-service-dev + profiles: + - debug-cassandra + depends_on: + datahub-gms-debug-cassandra: + condition: service_healthy + datahub-actions-debug-consumers: + <<: *datahub-actions-service-dev + profiles: + - debug-consumers + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy + datahub-actions-debug-neo4j: + <<: *datahub-actions-service-dev + profiles: + - debug-neo4j + depends_on: + datahub-gms-debug-neo4j: + condition: service_healthy + datahub-actions-debug-elasticsearch: + <<: *datahub-actions-service-dev + profiles: + - debug-elasticsearch + depends_on: + datahub-gms-debug-elasticsearch: + condition: service_healthy \ No newline at end of file diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml new file mode 100644 index 00000000000000..345493ba516508 --- /dev/null +++ b/docker/profiles/docker-compose.frontend.yml @@ -0,0 +1,111 @@ + +x-datahub-frontend-service: 
&datahub-frontend-service + hostname: datahub-frontend-react + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-frontend-react}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + env_file: + - datahub-frontend/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-frontend/env/docker.env} + - ${DATAHUB_LOCAL_FRONTEND_ENV:-datahub-frontend/env/docker.env} + environment: &datahub-frontend-service-env + KAFKA_BOOTSTRAP_SERVER: broker:29092 + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-frontend-service-dev: &datahub-frontend-service-dev + <<: *datahub-frontend-service + image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-frontend-react}:debug + ports: + - ${DATAHUB_MAPPED_FRONTEND_DEBUG_PORT:-5002}:5002 + - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 + environment: + <<: *datahub-frontend-service-env + JAVA_TOOL_OPTIONS: -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5002 + DATAHUB_ANALYTICS_ENABLED: ${DATAHUB_ANALYTICS_ENABLED:-true} + volumes: + - ../../datahub-frontend/build/stage/main:/datahub-frontend + +services: + frontend-quickstart: + <<: *datahub-frontend-service + profiles: + - quickstart + - quickstart-frontend + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-quickstart-cassandra: + <<: *datahub-frontend-service + profiles: + - quickstart-cassandra + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + frontend-quickstart-postgres: + <<: *datahub-frontend-service + profiles: + - quickstart-postgres + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + frontend-quickstart-consumers: + <<: *datahub-frontend-service + profiles: + - quickstart-consumers + depends_on: + system-update-quickstart: + condition: service_completed_successfully + frontend-debug: + <<: *datahub-frontend-service-dev + profiles: + - debug + 
depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-frontend: + <<: *datahub-frontend-service-dev + profiles: + - debug-frontend + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + frontend-debug-postgres: + <<: *datahub-frontend-service-dev + profiles: + - debug-postgres + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + frontend-debug-cassandra: + <<: *datahub-frontend-service-dev + profiles: + - debug-cassandra + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + frontend-debug-consumers: + <<: *datahub-frontend-service-dev + profiles: + - debug-consumers + depends_on: + system-update-debug: + condition: service_completed_successfully + frontend-debug-neo4j: + <<: *datahub-frontend-service-dev + profiles: + - debug-neo4j + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + frontend-debug-elasticsearch: + <<: *datahub-frontend-service-dev + profiles: + - debug-elasticsearch + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully \ No newline at end of file diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml new file mode 100644 index 00000000000000..e9baa65290e50f --- /dev/null +++ b/docker/profiles/docker-compose.gms.yml @@ -0,0 +1,422 @@ +################################# +# Common Environment Variables +################################# +x-primary-datastore-mysql-env: &primary-datastore-mysql-env + EBEAN_DATASOURCE_HOST: mysql:3306 + EBEAN_DATASOURCE_URL: 'jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2' + EBEAN_DATASOURCE_DRIVER: com.mysql.jdbc.Driver + 
+x-primary-datastore-postgres-env: &primary-datastore-postgres-env + EBEAN_DATASOURCE_HOST: postgres:5432 + EBEAN_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/datahub' + EBEAN_DATASOURCE_DRIVER: org.postgresql.Driver + EBEAN_POSTGRES_USE_AWS_IAM_AUTH: ${EBEAN_POSTGRES_USE_AWS_IAM_AUTH:-false} + +x-primary-datastore-cassandra-env: &primary-datastore-cassandra-env + CASSANDRA_DATASOURCE_USERNAME: cassandra + CASSANDRA_DATASOURCE_PASSWORD: cassandra + CASSANDRA_HOSTS: cassandra + CASSANDRA_PORT: 9042 + CASSANDRA_DATASOURCE_HOST: 'cassandra:9042' + ENTITY_SERVICE_IMPL: cassandra + +x-graph-datastore-neo4j-env: &graph-datastore-neo4j-env + GRAPH_SERVICE_IMPL: neo4j + NEO4J_HOST: 'http://neo4j:7474' + NEO4J_URI: 'bolt://neo4j' + NEO4J_USERNAME: neo4j + NEO4J_PASSWORD: datahub +x-graph-datastore-search-env: &graph-datastore-search-env + GRAPH_SERVICE_IMPL: elasticsearch + +x-search-datastore-elasticsearch-env: &search-datastore-env + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +x-kafka-env: &kafka-env + KAFKA_BOOTSTRAP_SERVER: broker:29092 + # KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 + SCHEMA_REGISTRY_TYPE: INTERNAL + KAFKA_SCHEMAREGISTRY_URL: http://datahub-gms:8080/schema-registry/api/ + +x-datahub-quickstart-telemetry-env: &datahub-quickstart-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-quickstart} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +x-datahub-dev-telemetry-env: &datahub-dev-telemetry-env + DATAHUB_SERVER_TYPE: ${DATAHUB_SERVER_TYPE:-dev} + DATAHUB_TELEMETRY_ENABLED: ${DATAHUB_TELEMETRY_ENABLED:-true} + +################################# +# System Update +################################# +x-datahub-system-update-service: &datahub-system-update-service + hostname: datahub-system-update + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:${DATAHUB_VERSION:-head} + command: + - -u + - 
SystemUpdate + env_file: + - datahub-upgrade/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-upgrade/env/docker.env} + - ${DATAHUB_LOCAL_SYS_UPDATE_ENV:-datahub-upgrade/env/docker.env} + environment: &datahub-system-update-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} + SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS: ${SPRING_KAFKA_PROPERTIES_AUTO_REGISTER_SCHEMAS:-true} + SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION: ${SPRING_KAFKA_PROPERTIES_USE_LATEST_VERSION:-true} + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +x-datahub-system-update-service-dev: &datahub-system-update-service-dev + <<: *datahub-system-update-service + image: ${DATAHUB_UPGRADE_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-upgrade}:debug + ports: + - ${DATAHUB_MAPPED_UPGRADE_DEBUG_PORT:-5003}:5003 + environment: &datahub-system-update-dev-env + <<: [*datahub-dev-telemetry-env, *datahub-system-update-env] + SKIP_ELASTICSEARCH_CHECK: false + REPROCESS_DEFAULT_BROWSE_PATHS_V2: ${REPROCESS_DEFAULT_BROWSE_PATHS_V2:-false} + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5003' + volumes: + - ../../datahub-upgrade/build/libs/:/datahub/datahub-upgrade/bin/ + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# GMS +################################# +x-datahub-gms-service: &datahub-gms-service + hostname: datahub-gms + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-gms}:${DATAHUB_VERSION:-head} + ports: + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + env_file: + - datahub-gms/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-gms/env/docker.env} + - ${DATAHUB_LOCAL_GMS_ENV:-datahub-gms/env/docker.env} + environment: &datahub-gms-env + <<: [*primary-datastore-mysql-env, 
*graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + healthcheck: + test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health + start_period: 90s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + labels: + io.datahubproject.datahub.component: "gms" + +x-datahub-gms-service-dev: &datahub-gms-service-dev + <<: *datahub-gms-service + image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-gms}:debug + ports: + - ${DATAHUB_MAPPED_GMS_DEBUG_PORT:-5001}:5001 + - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 + environment: &datahub-gms-dev-env + <<: [*datahub-dev-telemetry-env, *datahub-gms-env] + SKIP_ELASTICSEARCH_CHECK: false + JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5001' + BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE: false + SEARCH_SERVICE_ENABLE_CACHE: false + LINEAGE_SEARCH_CACHE_ENABLED: false + SHOW_BROWSE_V2: true + volumes: + - ./datahub-gms/start.sh:/datahub/datahub-gms/scripts/start.sh + - ./datahub-gms/jetty.xml:/datahub/datahub-gms/scripts/jetty.xml + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-gms/scripts/prometheus-config.yaml + - ../../metadata-models/src/main/resources/:/datahub/datahub-gms/resources + - ../../metadata-service/war/build/libs/:/datahub/datahub-gms/bin + - ${HOME}/.datahub/plugins:/etc/datahub/plugins + +################################# +# MAE Consumer +################################# +x-datahub-mae-consumer-service: &datahub-mae-consumer-service + hostname: datahub-mae-consumer + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9091:9091 + env_file: + - datahub-mae-consumer/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-mae-consumer/env/docker.env} + - ${DATAHUB_LOCAL_MAE_ENV:-datahub-mae-consumer/env/docker.env} + environment: &datahub-mae-consumer-env + <<: 
[*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] + +x-datahub-mae-consumer-service-dev: &datahub-mae-consumer-service-dev + <<: *datahub-mae-consumer-service + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mae-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mae-consumer-env] + volumes: + - ./datahub-mae-consumer/start.sh:/datahub/datahub-mae-consumer/scripts/start.sh + - ../../metadata-models/src/main/resources/:/datahub/datahub-mae-consumer/resources + - ../../metadata-jobs/mae-consumer-job/build/libs/:/datahub/datahub-mae-consumer/bin/ + - ./monitoring/client-prometheus-config.yaml:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml + +################################# +# MCE Consumer +################################# +x-datahub-mce-consumer-service: &datahub-mce-consumer-service + hostname: datahub-mce-consumer + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + ports: + - 9090:9090 + env_file: + - datahub-mce-consumer/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-mce-consumer/env/docker.env} + - ${DATAHUB_LOCAL_MCE_ENV:-datahub-mce-consumer/env/docker.env} + environment: &datahub-mce-consumer-env + <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] + +x-datahub-mce-consumer-service-dev: &datahub-mce-consumer-service-dev + <<: *datahub-mce-consumer-service + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mce-consumer}:debug + environment: + <<: [*datahub-dev-telemetry-env, *datahub-mce-consumer-env] + volumes: + - ./datahub-mce-consumer/start.sh:/datahub/datahub-mce-consumer/scripts/start.sh + - ../../metadata-jobs/mce-consumer-job/build/libs/:/datahub/datahub-mce-consumer/bin + - 
./monitoring/client-prometheus-config.yaml:/datahub/datahub-mce-consumer/scripts/prometheus-config.yaml + +services: + ################################# + # System Update + ################################# + system-update-quickstart: + <<: *datahub-system-update-service + profiles: + - quickstart + - quickstart-storage + - quickstart-consumers + - quickstart-frontend + - quickstart-backend + depends_on: + mysql-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-quickstart-cassandra: + <<: *datahub-system-update-service + profiles: + - quickstart-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-system-update-env] + depends_on: + neo4j: + condition: service_healthy + cassandra-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-quickstart-postgres: + <<: *datahub-system-update-service + profiles: + - quickstart-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-env] + depends_on: + postgres-setup: + condition: service_completed_successfully + opensearch-setup: + condition: service_completed_successfully + kafka-setup: + condition: service_completed_successfully + system-update-debug: + <<: *datahub-system-update-service-dev + profiles: + - debug + - debug-backend + - debug-consumers + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-elasticsearch: + <<: *datahub-system-update-service-dev + profiles: + - debug-elasticsearch + depends_on: + mysql-setup-dev: + condition: service_completed_successfully + elasticsearch-setup-dev: + 
condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-postgres: + <<: *datahub-system-update-service-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-system-update-dev-env] + depends_on: + postgres-setup-dev: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-cassandra: + <<: *datahub-system-update-service-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-system-update-dev-env] + depends_on: + cassandra-setup: + condition: service_completed_successfully + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + system-update-debug-neo4j: + <<: *datahub-system-update-service-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-system-update-dev-env] + depends_on: + neo4j: + condition: service_healthy + opensearch-setup-dev: + condition: service_completed_successfully + kafka-setup-dev: + condition: service_completed_successfully + ################################# + # GMS + ################################# + datahub-gms-quickstart: + <<: *datahub-gms-service + profiles: + - quickstart + - quickstart-backend + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-quickstart-cassandra: + <<: *datahub-gms-service + profiles: + - quickstart-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *graph-datastore-neo4j-env, *datahub-gms-env] + depends_on: + system-update-quickstart-cassandra: + condition: service_completed_successfully + datahub-gms-quickstart-postgres: + <<: *datahub-gms-service + profiles: + - quickstart-postgres + environment: + <<: [*primary-datastore-postgres-env, 
*datahub-gms-env] + depends_on: + system-update-quickstart-postgres: + condition: service_completed_successfully + datahub-gms-quickstart-consumers: + <<: *datahub-gms-service + profiles: + - quickstart-consumers + environment: + <<: *datahub-gms-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + depends_on: + system-update-quickstart: + condition: service_completed_successfully + datahub-gms-debug: + <<: *datahub-gms-service-dev + profiles: + - debug + - debug-backend + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-postgres: + <<: *datahub-gms-service-dev + profiles: + - debug-postgres + environment: + <<: [*primary-datastore-postgres-env, *datahub-gms-dev-env] + depends_on: + system-update-debug-postgres: + condition: service_completed_successfully + datahub-gms-debug-cassandra: + <<: *datahub-gms-service-dev + profiles: + - debug-cassandra + environment: + <<: [*primary-datastore-cassandra-env, *datahub-gms-dev-env] + depends_on: + system-update-debug-cassandra: + condition: service_completed_successfully + datahub-gms-debug-consumers: + <<: *datahub-gms-service-dev + profiles: + - debug-consumers + environment: + <<: *datahub-gms-dev-env + MAE_CONSUMER_ENABLED: false + MCE_CONSUMER_ENABLED: false + depends_on: + system-update-debug: + condition: service_completed_successfully + datahub-gms-debug-neo4j: + <<: *datahub-gms-service-dev + profiles: + - debug-neo4j + environment: + <<: [*graph-datastore-neo4j-env, *datahub-gms-dev-env] + depends_on: + system-update-debug-neo4j: + condition: service_completed_successfully + datahub-gms-debug-elasticsearch: + <<: *datahub-gms-service-dev + profiles: + - debug-elasticsearch + depends_on: + system-update-debug-elasticsearch: + condition: service_completed_successfully + ################################# + # MAE Consumer + ################################# + datahub-mae-consumer-quickstart-consumers: + <<: *datahub-mae-consumer-service + profiles: + - 
quickstart-consumers + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mae-consumer-quickstart-consumers-dev: + <<: *datahub-mae-consumer-service-dev + profiles: + - debug-consumers + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy + ################################# + # MCE Consumer + ################################# + datahub-mce-consumer-quickstart-consumers: + <<: *datahub-mce-consumer-service + profiles: + - quickstart-consumers + depends_on: + datahub-gms-quickstart-consumers: + condition: service_healthy + datahub-mce-consumer-quickstart-consumers-dev: + <<: *datahub-mce-consumer-service-dev + profiles: + - debug-consumers + depends_on: + datahub-gms-debug-consumers: + condition: service_healthy \ No newline at end of file diff --git a/docker/profiles/docker-compose.prerequisites.yml b/docker/profiles/docker-compose.prerequisites.yml new file mode 100644 index 00000000000000..8de220093dda52 --- /dev/null +++ b/docker/profiles/docker-compose.prerequisites.yml @@ -0,0 +1,370 @@ +# Common environment +x-search-datastore-search: &search-datastore-environment + ELASTICSEARCH_HOST: search + ELASTICSEARCH_PORT: 9200 + ELASTICSEARCH_PROTOCOL: http + ELASTICSEARCH_USE_SSL: ${ELASTICSEARCH_USE_SSL:-false} + +# Primary Storage Profiles +x-mysql-profiles-quickstart: &mysql-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers +x-mysql-profiles-dev: &mysql-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch +x-mysql-profiles: &mysql-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +x-postgres-profiles-quickstart: &postgres-profiles-quickstart + - quickstart-postgres +x-postgres-profiles-dev: 
&postgres-profiles-dev + - debug-postgres +x-postgres-profiles: &postgres-profiles + - quickstart-postgres + - debug-postgres + +x-cassandra-profiles: &cassandra-profiles + - quickstart-cassandra + - debug-cassandra + +# Graph Storage Profiles +x-neo4j-profiles: &neo4j-profiles + - quickstart-cassandra + - debug-neo4j + +# Search Storage Profiles +x-elasticsearch-profiles: &elasticsearch-profiles + - debug-elasticsearch + +x-opensearch-profiles-quickstart: &opensearch-profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-opensearch-profiles-dev: &opensearch-profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j +x-opensearch-profiles: &opensearch-profiles + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + +# Debug vs Quickstart Profiles +x-profiles-quickstart: &profiles-quickstart + - quickstart + - quickstart-backend + - quickstart-frontend + - quickstart-storage + - quickstart-cassandra + - quickstart-postgres + - quickstart-consumers +x-profiles-dev: &profiles-dev + - debug + - debug-frontend + - debug-backend + - debug-postgres + - debug-cassandra + - debug-consumers + - debug-neo4j + - debug-elasticsearch + +services: + mysql: + profiles: *mysql-profiles + hostname: mysql + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} + command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=caching_sha2_password + ports: + - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 + env_file: mysql/env/docker.env + restart: on-failure + healthcheck: + test: mysqladmin ping -h mysql -u $$MYSQL_USER 
--password=$$MYSQL_PASSWORD + start_period: 20s + interval: 2s + timeout: 10s + retries: 5 + volumes: + - ./mysql/init.sql:/docker-entrypoint-initdb.d/init.sql + - mysqldata:/var/lib/mysql + mysql-setup: &mysql-setup + profiles: *mysql-profiles-quickstart + hostname: mysql-setup + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:${DATAHUB_VERSION:-head} + env_file: mysql-setup/env/docker.env + depends_on: + mysql: + condition: service_healthy + labels: + datahub_setup_job: true + mysql-setup-dev: + <<: *mysql-setup + profiles: *mysql-profiles-dev + image: ${DATAHUB_MYSQL_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-mysql-setup}:debug + postgres: + profiles: *postgres-profiles + hostname: postgres + image: postgres:${DATAHUB_POSTGRES_VERSION:-15.5} + env_file: postgres/env/docker.env + ports: + - '5432:5432' + restart: on-failure + healthcheck: + test: [ "CMD-SHELL", "pg_isready" ] + start_period: 20s + interval: 2s + timeout: 10s + retries: 5 + volumes: + - ./postgres/init.sql:/docker-entrypoint-initdb.d/init.sql + - postgresdata:/var/lib/postgresql/data + postgres-setup: &postgres-setup + profiles: *postgres-profiles-quickstart + hostname: postgres-setup + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:${DATAHUB_VERSION:-head} + env_file: postgres-setup/env/docker.env + depends_on: + postgres: + condition: service_healthy + labels: + datahub_setup_job: true + postgres-setup-dev: + <<: *postgres-setup + profiles: *postgres-profiles-dev + image: ${DATAHUB_POSTGRES_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-postgres-setup}:debug + cassandra: + profiles: *cassandra-profiles + hostname: cassandra + image: cassandra:4.1 + ports: + - 9042:9042 + healthcheck: + test: cqlsh -u cassandra -p cassandra -e 'describe keyspaces' + interval: 15s + timeout: 10s + retries: 10 + volumes: + - cassandradata:/var/lib/cassandra + cassandra-setup: + profiles: *cassandra-profiles + hostname: 
cassandra-setup + image: cassandra:4.1 + command: /bin/bash -c "cqlsh cassandra -f /init.cql" + depends_on: + cassandra: + condition: service_healthy + volumes: + - ./cassandra/init.cql:/init.cql + labels: + datahub_setup_job: true + neo4j: + profiles: *neo4j-profiles + hostname: neo4j + image: neo4j:4.4.28-community + ports: + - ${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474}:7474 + - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 + env_file: neo4j/env/docker.env + healthcheck: + test: wget http://neo4j:$${DATAHUB_NEO4J_HTTP_PORT:-7474} + start_period: 5s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - neo4jdata:/data + kafka-broker: + hostname: broker + image: confluentinc/cp-kafka:7.4.0 + command: + - /bin/bash + - -c + - | + # Generate KRaft clusterID + file_path="/var/lib/kafka/data/clusterID" + + if [ ! -f "$$file_path" ]; then + /bin/kafka-storage random-uuid > $$file_path + echo "Cluster id has been created..." + # KRaft required step: Format the storage directory with a new cluster ID + kafka-storage format --ignore-formatted -t $$(cat "$$file_path") -c /etc/kafka/kafka.properties + fi + + export CLUSTER_ID=$$(cat "$$file_path") + echo "CLUSTER_ID=$$CLUSTER_ID" + + /etc/confluent/docker/run + ports: + - ${DATAHUB_MAPPED_KAFKA_BROKER_PORT:-9092}:9092 + env_file: kafka-broker/env/docker.env + environment: + KAFKA_NODE_ID: 1 + KAFKA_ADVERTISED_LISTENERS: BROKER://broker:29092,EXTERNAL://broker:9092 + KAFKA_LISTENERS: BROKER://broker:29092,EXTERNAL://broker:9092,CONTROLLER://broker:39092 + KAFKA_INTER_BROKER_LISTENER_NAME: BROKER + KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,BROKER:PLAINTEXT,EXTERNAL:PLAINTEXT + KAFKA_PROCESS_ROLES: controller, broker + KAFKA_CONTROLLER_QUORUM_VOTERS: 1@broker:39092 + # https://github.com/confluentinc/cp-all-in-one/issues/120 + KAFKA_LOG4J_LOGGERS: 'org.apache.kafka.image.loader.MetadataLoader=WARN' + KAFKA_ZOOKEEPER_CONNECT: null + healthcheck: + test: nc -z broker 
$${DATAHUB_KAFKA_BROKER_PORT:-9092} + start_period: 60s + interval: 1s + retries: 5 + timeout: 5s + volumes: + - broker:/var/lib/kafka/data/ + kafka-setup: &kafka-setup + profiles: *profiles-quickstart + hostname: kafka-setup + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + env_file: kafka-setup/env/docker.env + environment: &kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-false} + KAFKA_BOOTSTRAP_SERVER: broker:29092 + USE_CONFLUENT_SCHEMA_REGISTRY: false + depends_on: + kafka-broker: + condition: service_healthy + labels: + datahub_setup_job: true + kafka-setup-dev: + <<: *kafka-setup + profiles: *profiles-dev + environment: + <<: *kafka-setup-env + DATAHUB_PRECREATE_TOPICS: ${DATAHUB_PRECREATE_TOPICS:-true} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-kafka-setup}:debug + elasticsearch: + profiles: *elasticsearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-elasticsearch}:${DATAHUB_SEARCH_TAG:-7.10.1} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - ${XPACK_SECURITY_ENABLED:-xpack.security.enabled=false} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail "http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s" + start_period: 30s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - esdata:/usr/share/elasticsearch/data + elasticsearch-setup-dev: &elasticsearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:debug + profiles: *elasticsearch-profiles + hostname: elasticsearch-setup + env_file: elasticsearch-setup/env/docker.env + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-false} + depends_on: + elasticsearch: + condition: service_healthy + labels: + 
datahub_setup_job: true + opensearch: + profiles: *opensearch-profiles + hostname: search + image: ${DATAHUB_SEARCH_IMAGE:-opensearchproject/opensearch}:${DATAHUB_SEARCH_TAG:-2.11.0} + ports: + - ${DATAHUB_MAPPED_ELASTIC_PORT:-9200}:9200 + env_file: elasticsearch/env/docker.env + environment: + - discovery.type=single-node + - ${XPACK_SECURITY_ENABLED:-plugins.security.disabled=true} + deploy: + resources: + limits: + memory: 1G + healthcheck: + test: curl -sS --fail "http://search:$${DATAHUB_ELASTIC_PORT:-9200}/_cluster/health?wait_for_status=yellow&timeout=0s" + start_period: 30s + interval: 1s + retries: 3 + timeout: 5s + volumes: + - osdata:/usr/share/elasticsearch/data + opensearch-setup: &opensearch-setup + <<: *elasticsearch-setup-dev + profiles: *opensearch-profiles-quickstart + hostname: opensearch-setup + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + labels: + datahub_setup_job: true + opensearch-setup-dev: + <<: *opensearch-setup + profiles: *opensearch-profiles-dev + hostname: opensearch-setup-dev + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-${DATAHUB_REPO:-acryldata}/datahub-elasticsearch-setup}:debug + environment: + <<: *search-datastore-environment + USE_AWS_ELASTICSEARCH: ${USE_AWS_ELASTICSEARCH:-true} + depends_on: + opensearch: + condition: service_healthy + +networks: + default: + name: datahub_network + +volumes: + neo4jdata: + esdata: + osdata: + broker: + mysqldata: + cassandradata: + postgresdata: diff --git a/docker/profiles/docker-compose.yml b/docker/profiles/docker-compose.yml new file mode 100644 index 00000000000000..534ca9702e2d79 --- /dev/null +++ b/docker/profiles/docker-compose.yml @@ -0,0 +1,13 @@ +--- +version: '3.9' +name: datahub + +include: + # Contains storage layers: e.g.
mysql, kafka, elasticsearch + - docker-compose.prerequisites.yml + # Actions pod + - docker-compose.actions.yml + # Frontend + - docker-compose.frontend.yml + # Remaining components: e.g. gms, system-update, consumers + - docker-compose.gms.yml diff --git a/docker/profiles/elasticsearch b/docker/profiles/elasticsearch new file mode 120000 index 00000000000000..7712783b3e8d64 --- /dev/null +++ b/docker/profiles/elasticsearch @@ -0,0 +1 @@ +../elasticsearch \ No newline at end of file diff --git a/docker/profiles/elasticsearch-setup b/docker/profiles/elasticsearch-setup new file mode 120000 index 00000000000000..670a10e8c37865 --- /dev/null +++ b/docker/profiles/elasticsearch-setup @@ -0,0 +1 @@ +../elasticsearch-setup \ No newline at end of file diff --git a/docker/profiles/kafka-broker b/docker/profiles/kafka-broker new file mode 120000 index 00000000000000..23b248a4e0bbd4 --- /dev/null +++ b/docker/profiles/kafka-broker @@ -0,0 +1 @@ +../broker \ No newline at end of file diff --git a/docker/profiles/kafka-setup b/docker/profiles/kafka-setup new file mode 120000 index 00000000000000..35b9c167ac26e9 --- /dev/null +++ b/docker/profiles/kafka-setup @@ -0,0 +1 @@ +../kafka-setup \ No newline at end of file diff --git a/docker/profiles/monitoring b/docker/profiles/monitoring new file mode 120000 index 00000000000000..1371b42ae4593c --- /dev/null +++ b/docker/profiles/monitoring @@ -0,0 +1 @@ +../monitoring \ No newline at end of file diff --git a/docker/profiles/mysql b/docker/profiles/mysql new file mode 120000 index 00000000000000..057b59f7601652 --- /dev/null +++ b/docker/profiles/mysql @@ -0,0 +1 @@ +../mysql \ No newline at end of file diff --git a/docker/profiles/mysql-setup b/docker/profiles/mysql-setup new file mode 120000 index 00000000000000..f9199ec3fc58f0 --- /dev/null +++ b/docker/profiles/mysql-setup @@ -0,0 +1 @@ +../mysql-setup \ No newline at end of file diff --git a/docker/profiles/neo4j b/docker/profiles/neo4j new file mode 120000 index 
00000000000000..0d4849d989d436 --- /dev/null +++ b/docker/profiles/neo4j @@ -0,0 +1 @@ +../neo4j \ No newline at end of file diff --git a/docker/profiles/postgres b/docker/profiles/postgres new file mode 120000 index 00000000000000..be56a57bd0ab8f --- /dev/null +++ b/docker/profiles/postgres @@ -0,0 +1 @@ +../postgres \ No newline at end of file diff --git a/docker/profiles/postgres-setup b/docker/profiles/postgres-setup new file mode 120000 index 00000000000000..38f51721feacb9 --- /dev/null +++ b/docker/profiles/postgres-setup @@ -0,0 +1 @@ +../postgres-setup/ \ No newline at end of file diff --git a/docker/quickstart.sh b/docker/quickstart.sh index a7eadf18bcb664..90c7d4ba9d0a49 100755 --- a/docker/quickstart.sh +++ b/docker/quickstart.sh @@ -33,11 +33,11 @@ echo "Quickstarting DataHub: version ${DATAHUB_VERSION}" if docker volume ls | grep -c -q datahub_neo4jdata then echo "Datahub Neo4j volume found, starting with neo4j as graph service" - cd $DIR && docker-compose pull && docker-compose -p datahub up + cd $DIR && docker compose pull && docker compose -p datahub up else echo "No Datahub Neo4j volume found, starting with elasticsearch as graph service" cd $DIR && \ - DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker-compose -p datahub \ + DOCKER_DEFAULT_PLATFORM="$(uname -m)" docker compose -p datahub \ -f quickstart/docker-compose-without-neo4j.quickstart.yml \ $MONITORING_COMPOSE $CONSUMERS_COMPOSE $M1_COMPOSE up $@ fi diff --git a/docker/quickstart/docker-compose-m1.quickstart.yml b/docker/quickstart/docker-compose-m1.quickstart.yml index 7b7ca4052f3245..d2ac2f151fcbbd 100644 --- a/docker/quickstart/docker-compose-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-m1.quickstart.yml @@ -3,7 +3,6 @@ networks: name: datahub_network services: broker: - container_name: broker depends_on: zookeeper: condition: service_healthy @@ -31,7 +30,6 @@ services: volumes: - broker:/var/lib/kafka/data/ datahub-actions: - container_name: datahub-actions depends_on: 
datahub-gms: condition: service_healthy @@ -51,7 +49,6 @@ services: hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} datahub-frontend-react: - container_name: datahub-frontend-react depends_on: datahub-gms: condition: service_healthy @@ -67,13 +64,12 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-gms: - container_name: datahub-gms depends_on: datahub-upgrade: condition: service_completed_successfully @@ -97,6 +93,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true @@ -114,7 +111,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -123,7 +120,6 @@ services: command: - -u - SystemUpdate - container_name: datahub-upgrade depends_on: elasticsearch-setup: condition: service_completed_successfully @@ -157,7 +153,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch deploy: resources: limits: @@ -180,7 +175,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data elasticsearch-setup: - container_name: elasticsearch-setup depends_on: 
elasticsearch: condition: service_healthy @@ -191,11 +185,10 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: - container_name: kafka-setup depends_on: broker: condition: service_healthy @@ -207,12 +200,11 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password - container_name: mysql environment: - MYSQL_DATABASE=datahub - MYSQL_USER=datahub @@ -233,7 +225,6 @@ services: - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql - mysqldata:/var/lib/mysql mysql-setup: - container_name: mysql-setup depends_on: mysql: condition: service_healthy @@ -248,7 +239,6 @@ services: labels: datahub_setup_job: true neo4j: - container_name: neo4j environment: - NEO4J_AUTH=neo4j/datahub - NEO4J_dbms_default__database=graph.db @@ -268,7 +258,6 @@ services: volumes: - neo4jdata:/data schema-registry: - container_name: schema-registry depends_on: broker: condition: service_healthy @@ -287,7 +276,6 @@ services: ports: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 zookeeper: - container_name: zookeeper environment: - ZOOKEEPER_CLIENT_PORT=2181 - ZOOKEEPER_TICK_TIME=2000 diff --git a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml index 53dacaf6ef63b0..1ba467d7fb9289 100644 --- 
a/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j-m1.quickstart.yml @@ -3,7 +3,6 @@ networks: name: datahub_network services: broker: - container_name: broker depends_on: zookeeper: condition: service_healthy @@ -31,7 +30,6 @@ services: volumes: - broker:/var/lib/kafka/data/ datahub-actions: - container_name: datahub-actions depends_on: datahub-gms: condition: service_healthy @@ -51,7 +49,6 @@ services: hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} datahub-frontend-react: - container_name: datahub-frontend-react depends_on: datahub-gms: condition: service_healthy @@ -67,13 +64,12 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-gms: - container_name: datahub-gms depends_on: datahub-upgrade: condition: service_completed_successfully @@ -97,6 +93,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true @@ -109,7 +106,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -118,7 +115,6 @@ services: command: - -u - SystemUpdate - container_name: 
datahub-upgrade depends_on: elasticsearch-setup: condition: service_completed_successfully @@ -150,7 +146,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch deploy: resources: limits: @@ -173,7 +168,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data elasticsearch-setup: - container_name: elasticsearch-setup depends_on: elasticsearch: condition: service_healthy @@ -184,11 +178,10 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: - container_name: kafka-setup depends_on: broker: condition: service_healthy @@ -200,12 +193,11 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password - container_name: mysql environment: - MYSQL_DATABASE=datahub - MYSQL_USER=datahub @@ -226,7 +218,6 @@ services: - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql - mysqldata:/var/lib/mysql mysql-setup: - container_name: mysql-setup depends_on: mysql: condition: service_healthy @@ -241,7 +232,6 @@ services: labels: datahub_setup_job: true schema-registry: - container_name: schema-registry depends_on: broker: condition: service_healthy @@ -260,7 +250,6 @@ services: ports: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 zookeeper: - container_name: zookeeper environment: - ZOOKEEPER_CLIENT_PORT=2181 - ZOOKEEPER_TICK_TIME=2000 diff 
--git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 1ca91aa19206da..893af253095bf3 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -3,7 +3,6 @@ networks: name: datahub_network services: broker: - container_name: broker depends_on: zookeeper: condition: service_healthy @@ -31,7 +30,6 @@ services: volumes: - broker:/var/lib/kafka/data/ datahub-actions: - container_name: datahub-actions depends_on: datahub-gms: condition: service_healthy @@ -51,7 +49,6 @@ services: hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} datahub-frontend-react: - container_name: datahub-frontend-react depends_on: datahub-gms: condition: service_healthy @@ -67,13 +64,12 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-gms: - container_name: datahub-gms depends_on: datahub-upgrade: condition: service_completed_successfully @@ -97,6 +93,7 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true @@ -109,7 +106,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: 
${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -118,7 +115,6 @@ services: command: - -u - SystemUpdate - container_name: datahub-upgrade depends_on: elasticsearch-setup: condition: service_completed_successfully @@ -150,7 +146,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch deploy: resources: limits: @@ -173,7 +168,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data elasticsearch-setup: - container_name: elasticsearch-setup depends_on: elasticsearch: condition: service_healthy @@ -184,11 +178,10 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: - container_name: kafka-setup depends_on: broker: condition: service_healthy @@ -200,12 +193,11 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password - container_name: mysql environment: - MYSQL_DATABASE=datahub - MYSQL_USER=datahub @@ -218,7 +210,7 @@ services: test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD timeout: 5s hostname: mysql - image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 restart: on-failure @@ -226,7 +218,6 @@ services: - 
../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql - mysqldata:/var/lib/mysql mysql-setup: - container_name: mysql-setup depends_on: mysql: condition: service_healthy @@ -241,7 +232,6 @@ services: labels: datahub_setup_job: true schema-registry: - container_name: schema-registry depends_on: broker: condition: service_healthy @@ -260,7 +250,6 @@ services: ports: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 zookeeper: - container_name: zookeeper environment: - ZOOKEEPER_CLIENT_PORT=2181 - ZOOKEEPER_TICK_TIME=2000 diff --git a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml index d05933df96a433..a4211acedcf102 100644 --- a/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers-without-neo4j.quickstart.yml @@ -4,8 +4,9 @@ services: - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: - container_name: datahub-mae-consumer environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -19,11 +20,10 @@ services: - GRAPH_SERVICE_IMPL=elasticsearch - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 datahub-mce-consumer: - container_name: datahub-mce-consumer environment: - DATAHUB_SERVER_TYPE=${DATAHUB_SERVER_TYPE:-quickstart} - DATAHUB_SYSTEM_CLIENT_ID=__datahub_system @@ -44,13 +44,15 @@ services: - 
GRAPH_SERVICE_IMPL=elasticsearch - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true - PE_CONSUMER_ENABLED=false - UI_INGESTION_ENABLED=false hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 version: '3.9' diff --git a/docker/quickstart/docker-compose.consumers.quickstart.yml b/docker/quickstart/docker-compose.consumers.quickstart.yml index f0bd3a0f927c80..e7571e4baf8b4e 100644 --- a/docker/quickstart/docker-compose.consumers.quickstart.yml +++ b/docker/quickstart/docker-compose.consumers.quickstart.yml @@ -4,11 +4,12 @@ services: - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=false datahub-mae-consumer: - container_name: datahub-mae-consumer depends_on: neo4j: condition: service_healthy environment: + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} - DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID=generic-duhe-consumer-job-client-mcl - DATAHUB_GMS_HOST=datahub-gms - DATAHUB_GMS_PORT=8080 @@ -26,11 +27,10 @@ services: - GRAPH_SERVICE_IMPL=neo4j - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-mae-consumer/resources/entity-registry.yml hostname: datahub-mae-consumer - image: ${DATAHUB_MAE_CONSUMER_IMAGE:-linkedin/datahub-mae-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MAE_CONSUMER_IMAGE:-acryldata/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 datahub-mce-consumer: - container_name: datahub-mce-consumer 
depends_on: neo4j: condition: service_healthy @@ -54,6 +54,8 @@ services: - GRAPH_SERVICE_IMPL=neo4j - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_HEALTH_CHECK_ENABLED=${KAFKA_CONSUMER_HEALTH_CHECK_ENABLED:-true} + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=false - MCE_CONSUMER_ENABLED=true @@ -64,7 +66,7 @@ services: - PE_CONSUMER_ENABLED=false - UI_INGESTION_ENABLED=false hostname: datahub-mce-consumer - image: ${DATAHUB_MCE_CONSUMER_IMAGE:-linkedin/datahub-mce-consumer}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_MCE_CONSUMER_IMAGE:-acryldata/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 version: '3.9' diff --git a/docker/quickstart/docker-compose.monitoring.quickstart.yml b/docker/quickstart/docker-compose.monitoring.quickstart.yml index 27a6e0120bca18..bddd82e393338f 100644 --- a/docker/quickstart/docker-compose.monitoring.quickstart.yml +++ b/docker/quickstart/docker-compose.monitoring.quickstart.yml @@ -36,7 +36,6 @@ services: - '14268' - '14250' prometheus: - container_name: prometheus image: prom/prometheus:latest ports: - 9089:9090 diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index c77b4418b6f366..f3490ce502626d 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -3,7 +3,6 @@ networks: name: datahub_network services: broker: - container_name: broker depends_on: zookeeper: condition: service_healthy @@ -31,7 +30,6 @@ services: volumes: - broker:/var/lib/kafka/data/ datahub-actions: - container_name: datahub-actions depends_on: datahub-gms: condition: service_healthy @@ -51,7 +49,6 @@ services: hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-acryldata/datahub-actions}:${ACTIONS_VERSION:-head} datahub-frontend-react: - container_name: 
datahub-frontend-react depends_on: datahub-gms: condition: service_healthy @@ -67,13 +64,12 @@ services: - ELASTIC_CLIENT_HOST=elasticsearch - ELASTIC_CLIENT_PORT=9200 hostname: datahub-frontend-react - image: ${DATAHUB_FRONTEND_IMAGE:-linkedin/datahub-frontend-react}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_FRONTEND_IMAGE:-acryldata/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 volumes: - ${HOME}/.datahub/plugins:/etc/datahub/plugins datahub-gms: - container_name: datahub-gms depends_on: datahub-upgrade: condition: service_completed_successfully @@ -97,6 +93,7 @@ services: - GRAPH_SERVICE_IMPL=${GRAPH_SERVICE_IMPL:-elasticsearch} - JAVA_OPTS=-Xms1g -Xmx1g - KAFKA_BOOTSTRAP_SERVER=broker:29092 + - KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR=${KAFKA_CONSUMER_STOP_ON_DESERIALIZATION_ERROR:-true} - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081 - MAE_CONSUMER_ENABLED=true - MCE_CONSUMER_ENABLED=true @@ -114,7 +111,7 @@ services: test: curl -sS --fail http://datahub-gms:${DATAHUB_GMS_PORT:-8080}/health timeout: 5s hostname: datahub-gms - image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_GMS_IMAGE:-acryldata/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 volumes: @@ -123,7 +120,6 @@ services: command: - -u - SystemUpdate - container_name: datahub-upgrade depends_on: elasticsearch-setup: condition: service_completed_successfully @@ -157,7 +153,6 @@ services: labels: datahub_setup_job: true elasticsearch: - container_name: elasticsearch deploy: resources: limits: @@ -180,7 +175,6 @@ services: volumes: - esdata:/usr/share/elasticsearch/data elasticsearch-setup: - container_name: elasticsearch-setup depends_on: elasticsearch: condition: service_healthy @@ -191,11 +185,10 @@ services: - ELASTICSEARCH_PORT=9200 - ELASTICSEARCH_PROTOCOL=http hostname: elasticsearch-setup - image: 
${DATAHUB_ELASTIC_SETUP_IMAGE:-linkedin/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_ELASTIC_SETUP_IMAGE:-acryldata/datahub-elasticsearch-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true kafka-setup: - container_name: kafka-setup depends_on: broker: condition: service_healthy @@ -207,12 +200,11 @@ services: - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - USE_CONFLUENT_SCHEMA_REGISTRY=TRUE hostname: kafka-setup - image: ${DATAHUB_KAFKA_SETUP_IMAGE:-linkedin/datahub-kafka-setup}:${DATAHUB_VERSION:-head} + image: ${DATAHUB_KAFKA_SETUP_IMAGE:-acryldata/datahub-kafka-setup}:${DATAHUB_VERSION:-head} labels: datahub_setup_job: true mysql: command: --character-set-server=utf8mb4 --collation-server=utf8mb4_bin --default-authentication-plugin=mysql_native_password - container_name: mysql environment: - MYSQL_DATABASE=datahub - MYSQL_USER=datahub @@ -225,7 +217,7 @@ services: test: mysqladmin ping -h mysql -u $$MYSQL_USER --password=$$MYSQL_PASSWORD timeout: 5s hostname: mysql - image: mysql:${DATAHUB_MYSQL_VERSION:-5.7} + image: mysql:${DATAHUB_MYSQL_VERSION:-8.2} ports: - ${DATAHUB_MAPPED_MYSQL_PORT:-3306}:3306 restart: on-failure @@ -233,7 +225,6 @@ services: - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql - mysqldata:/var/lib/mysql mysql-setup: - container_name: mysql-setup depends_on: mysql: condition: service_healthy @@ -248,7 +239,6 @@ services: labels: datahub_setup_job: true neo4j: - container_name: neo4j environment: - NEO4J_AUTH=neo4j/datahub - NEO4J_dbms_default__database=graph.db @@ -268,7 +258,6 @@ services: volumes: - neo4jdata:/data schema-registry: - container_name: schema-registry depends_on: broker: condition: service_healthy @@ -287,7 +276,6 @@ services: ports: - ${DATAHUB_MAPPED_SCHEMA_REGISTRY_PORT:-8081}:8081 zookeeper: - container_name: zookeeper environment: - ZOOKEEPER_CLIENT_PORT=2181 - ZOOKEEPER_TICK_TIME=2000 diff --git a/docker/quickstart/quickstart_version_mapping.yaml 
b/docker/quickstart/quickstart_version_mapping.yaml index 9948bd55fdc0b6..d9b7b4d661f064 100644 --- a/docker/quickstart/quickstart_version_mapping.yaml +++ b/docker/quickstart/quickstart_version_mapping.yaml @@ -23,7 +23,7 @@ quickstart_version_map: default: composefile_git_ref: master docker_tag: head - mysql_tag: 5.7 + mysql_tag: "8.2" # default: # Use this to pin default to a specific version. # composefile_git_ref: fd1bd51541a132017a648f4a2f037eec8f70ba26 # v0.10.0 + quickstart compose file fixes # docker_tag: v0.10.0 @@ -31,19 +31,19 @@ quickstart_version_map: head: composefile_git_ref: master docker_tag: head - mysql_tag: 5.7 + mysql_tag: "8.2" # v0.13.0 we upgraded MySQL image for EOL v0.13.0: composefile_git_ref: master - docker_tag: head - mysql_tag: 8.2 + docker_tag: v0.13.0 + mysql_tag: "8.2" # v0.9.6 images contain security vulnerabilities v0.9.6: composefile_git_ref: v0.9.6.1 docker_tag: v0.9.6.1 - mysql_tag: 5.7 + mysql_tag: "5.7" # If stable is not defined the latest released version will be used. # stable: diff --git a/docs-website/build.gradle b/docs-website/build.gradle index a213ec1ae8194d..f3bedd2516319a 100644 --- a/docs-website/build.gradle +++ b/docs-website/build.gradle @@ -1,5 +1,7 @@ -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' +plugins { + id 'distribution' + id 'com.github.node-gradle.node' +} node { @@ -12,10 +14,10 @@ node { } // Version of node to use. - version = '16.16.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.1' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). 
if (project.hasProperty('nodeDistBaseUrl')) { @@ -31,7 +33,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } /* @@ -43,7 +45,7 @@ def projectMdFiles = project.fileTree("${project.projectDir}") { include '**/*.ts' exclude 'node_modules' exclude '**/dist/**' - } +} // Combine GraphQL schemas for documentation. task generateGraphQLSchema(type: Exec) { @@ -66,12 +68,25 @@ task yarnInstall(type: YarnTask) { } else { args = ['install'] } + + // The node_modules directory can contain built artifacts, so + // it's not really safe to cache it. + outputs.cacheIf { false } + + inputs.files( + file('yarn.lock'), + file('package.json'), + ) + outputs.dir('node_modules') } task yarnGenerate(type: YarnTask, dependsOn: [yarnInstall, generateGraphQLSchema, generateJsonSchema, ':metadata-ingestion:modelDocGen', ':metadata-ingestion:docGen', - ':metadata-ingestion:buildWheel', ':metadata-ingestion-modules:airflow-plugin:buildWheel'] ) { + ':metadata-ingestion:buildWheel', + ':metadata-ingestion-modules:airflow-plugin:buildWheel', + ':metadata-ingestion-modules:dagster-plugin:buildWheel', + ]) { inputs.files(projectMdFiles) outputs.cacheIf { true } args = ['run', 'generate'] @@ -92,17 +107,11 @@ task fastReload(type: YarnTask) { task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { inputs.files(projectMdFiles) args = ['run', 'lint-check'] - outputs.dir("dist") - // tell gradle to apply the build cache - outputs.cacheIf { true } } task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall]) { inputs.files(projectMdFiles) args = ['run', 'lint-fix'] - outputs.dir("dist") - // tell gradle to apply the build cache - outputs.cacheIf { true } } task serve(type: YarnTask, dependsOn: [yarnInstall] ) { @@ -121,8 +130,12 @@ task yarnBuild(type: YarnTask, dependsOn: [yarnLint, yarnGenerate, 
downloadHisto outputs.cacheIf { true } // See https://stackoverflow.com/questions/53230823/fatal-error-ineffective-mark-compacts-near-heap-limit-allocation-failed-java // and https://github.com/facebook/docusaurus/issues/8329. - // TODO: As suggested in https://github.com/facebook/docusaurus/issues/4765, try switching to swc-loader. - environment = ['NODE_OPTIONS': '--max-old-space-size=10248'] + // TODO: As suggested in https://github.com/facebook/docusaurus/issues/4765, try switching to swc-loader or esbuild minification. + if (project.hasProperty('useSystemNode') && project.getProperty('useSystemNode').toBoolean()) { + environment = ['NODE_OPTIONS': '--max-old-space-size=10240'] + } else { + environment = ['NODE_OPTIONS': '--max-old-space-size=10240 --openssl-legacy-provider'] + } args = ['run', 'build'] } diff --git a/docs-website/docusaurus.config.js b/docs-website/docusaurus.config.js index 506e263933394a..b390bbed60a3e8 100644 --- a/docs-website/docusaurus.config.js +++ b/docs-website/docusaurus.config.js @@ -23,8 +23,29 @@ module.exports = { noIndex: isSaas, customFields: { isSaas: isSaas, - markpromptProjectKey: process.env.DOCUSAURUS_MARKPROMPT_PROJECT_KEY || "IeF3CUFCUQWuouZ8MP5Np9nES52QAtaA", + markpromptProjectKey: process.env.DOCUSAURUS_MARKPROMPT_PROJECT_KEY || "0U6baUoEdHVV4fyPpr5pxcX3dFlAMEu9", }, + + // See https://github.com/facebook/docusaurus/issues/4765 + // and https://github.com/langchain-ai/langchainjs/pull/1568 + webpack: { + jsLoader: (isServer) => ({ + loader: require.resolve("swc-loader"), + options: { + jsc: { + parser: { + syntax: "typescript", + tsx: true, + }, + target: "es2017", + }, + module: { + type: isServer ? 
"commonjs" : "es6", + }, + }, + }), + }, + themeConfig: { ...(!isSaas && { announcementBar: { @@ -45,7 +66,7 @@ module.exports = { }, items: [ { - to: "docs/", + to: "docs/features", activeBasePath: "docs", label: "Docs", position: "right", @@ -56,6 +77,25 @@ module.exports = { label: "Integrations", position: "right", }, + { + type: "dropdown", + label: "Community", + position: "right", + items: [ + { + to: "/slack", + label: "Join Slack", + }, + { + to: "/events", + label: "Events", + }, + { + to: "/champions", + label: "Champions", + }, + ], + }, { type: "dropdown", label: "Resources", @@ -66,17 +106,13 @@ module.exports = { label: "Demo", }, { - href: "https://blog.datahubproject.io/", + href: "https://www.acryldata.io/blog", label: "Blog", }, { href: "https://feature-requests.datahubproject.io/roadmap", label: "Roadmap", }, - { - href: "https://slack.datahubproject.io", - label: "Slack", - }, { href: "https://github.com/datahub-project/datahub", label: "GitHub", @@ -85,12 +121,46 @@ module.exports = { href: "https://www.youtube.com/channel/UC3qFQC5IiwR5fvWEqi_tJ5w", label: "YouTube", }, + { + href: "https://www.youtube.com/playlist?list=PLdCtLs64vZvGCKMQC2dJEZ6cUqWsREbFi", + label: "Case Studies", + }, + { + href: "https://www.youtube.com/playlist?list=PLdCtLs64vZvErAXMiqUYH9e63wyDaMBgg", + label: "DataHub Basics", + }, ], }, { type: "docsVersionDropdown", position: "left", dropdownActiveClassDisabled: true, + dropdownItemsAfter: [ + { + type: 'html', + value: '', + }, + { + type: 'html', + value: '', + }, + { + value: ` + 0.11.0 + + + `, + type: "html", + }, + { + value: ` + 0.10.5 + + + `, + type: "html", + }, + ], }, ], }, @@ -194,8 +264,8 @@ module.exports = { }), numberPrefixParser: false, // TODO: make these work correctly with the doc generation - showLastUpdateAuthor: true, - showLastUpdateTime: true, + showLastUpdateAuthor: false, + showLastUpdateTime: false, }, blog: false, theme: { @@ -210,6 +280,9 @@ module.exports = { path: "src/pages", 
mdxPageComponent: "@theme/MDXPage", }, + googleTagManager: { + containerId: 'GTM-WK28RLTG', + }, }, ], ], @@ -223,7 +296,6 @@ module.exports = { routeBasePath: "/docs/graphql", }, ], - // '@docusaurus/plugin-google-gtag', // [ // require.resolve("@easyops-cn/docusaurus-search-local"), // { diff --git a/docs-website/download_historical_versions.py b/docs-website/download_historical_versions.py index 53ee9cf1e63ef5..7493210ffa2a5f 100644 --- a/docs-website/download_historical_versions.py +++ b/docs-website/download_historical_versions.py @@ -37,9 +37,9 @@ def fetch_urls( except Exception as e: if attempt < max_retries: print(f"Attempt {attempt + 1}/{max_retries}: {e}") - time.sleep(retry_delay) + time.sleep(retry_delay * 2**attempt) else: - print(f"Max retries reached. Unable to fetch data.") + print("Max retries reached. Unable to fetch data.") raise diff --git a/docs-website/genJsonSchema/gen_json_schema.py b/docs-website/genJsonSchema/gen_json_schema.py index 81c1d5a2c1a30f..4af72487644bd6 100644 --- a/docs-website/genJsonSchema/gen_json_schema.py +++ b/docs-website/genJsonSchema/gen_json_schema.py @@ -7,7 +7,7 @@ def get_base() -> Any: return { - "$schema": "http://json-schema.org/draft-04/schema#", + "$schema": "https://json-schema.org/draft/2020-12/schema", "id": "https://json.schemastore.org/datahub-ingestion", "title": "Datahub Ingestion", "description": "Root schema of Datahub Ingestion", @@ -116,7 +116,7 @@ def get_base() -> Any: "bootstrap": { "type": "string", "description": "Kafka bootstrap URL.", - "default": "localhost:9092" + "default": "localhost:9092", }, "producer_config": { "type": "object", @@ -125,7 +125,7 @@ def get_base() -> Any: "schema_registry_url": { "type": "string", "description": "URL of schema registry being used.", - "default": "http://localhost:8081" + "default": "http://localhost:8081", }, "schema_registry_config": { "type": "object", diff --git a/docs-website/generateDocsDir.ts b/docs-website/generateDocsDir.ts index 
e19f09530665a0..9116218290d32d 100644 --- a/docs-website/generateDocsDir.ts +++ b/docs-website/generateDocsDir.ts @@ -572,6 +572,7 @@ function copy_python_wheels(): void { const wheel_dirs = [ "../metadata-ingestion/dist", "../metadata-ingestion-modules/airflow-plugin/dist", + "../metadata-ingestion-modules/dagster-plugin/dist", ]; const wheel_output_directory = path.join(STATIC_DIRECTORY, "wheels"); diff --git a/docs-website/graphql/generateGraphQLSchema.sh b/docs-website/graphql/generateGraphQLSchema.sh index 4e41c5dfbfacd3..c6d7ec528b6137 100755 --- a/docs-website/graphql/generateGraphQLSchema.sh +++ b/docs-website/graphql/generateGraphQLSchema.sh @@ -16,3 +16,5 @@ cat ../../datahub-graphql-core/src/main/resources/tests.graphql >> combined.grap cat ../../datahub-graphql-core/src/main/resources/timeline.graphql >> combined.graphql cat ../../datahub-graphql-core/src/main/resources/step.graphql >> combined.graphql cat ../../datahub-graphql-core/src/main/resources/lineage.graphql >> combined.graphql +cat ../../datahub-graphql-core/src/main/resources/properties.graphql >> combined.graphql +cat ../../datahub-graphql-core/src/main/resources/forms.graphql >> combined.graphql \ No newline at end of file diff --git a/docs-website/package.json b/docs-website/package.json index eca6e5814d3c6b..a9eb0a0251ca96 100644 --- a/docs-website/package.json +++ b/docs-website/package.json @@ -5,7 +5,7 @@ "scripts": { "docusaurus": "docusaurus", "start": "docusaurus start", - "build": "docusaurus build", + "build": "DOCUSAURUS_SSR_CONCURRENCY=5 docusaurus build", "swizzle": "docusaurus swizzle", "deploy": "docusaurus deploy", "serve": "docusaurus serve", @@ -28,12 +28,14 @@ "@docusaurus/plugin-content-docs": "2.4.1", "@docusaurus/plugin-ideal-image": "^2.4.1", "@docusaurus/preset-classic": "^2.4.1", - "@markprompt/react": "^0.3.5", + "@markprompt/css": "^0.26.0", + "@markprompt/react": "^0.40.5", "@octokit/plugin-retry": "^3.0.9", "@octokit/plugin-throttling": "^3.5.1", 
"@octokit/rest": "^18.6.2", "@radix-ui/react-visually-hidden": "^1.0.2", "@supabase/supabase-js": "^2.33.1", + "@swc/core": "^1.4.2", "antd": "^5.0.7", "clsx": "^1.1.1", "docusaurus-graphql-plugin": "0.5.0", @@ -44,6 +46,7 @@ "react": "^18.2.0", "react-dom": "18.2.0", "sass": "^1.43.2", + "swc-loader": "^0.2.6", "uuid": "^9.0.0" }, "browserslist": { diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index 5d7c6b06adad41..34398bc8c6661b 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -56,6 +56,7 @@ module.exports = { "docs/features/dataset-usage-and-query-history", "docs/posts", "docs/sync-status", + "docs/incidents/incidents", "docs/generated/lineage/lineage-feature-guide", { type: "doc", @@ -68,24 +69,38 @@ module.exports = { type: "category", items: [ { - type: "doc", - id: "docs/managed-datahub/observe/freshness-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/volume-assertions", - className: "saasOnly", - }, - { - type: "doc", - id: "docs/managed-datahub/observe/custom-sql-assertions", - className: "saasOnly", + label: "Assertions", + type: "category", + link: { + type: "doc", + id: "docs/managed-datahub/observe/assertions", + }, + items: [ + { + type: "doc", + id: "docs/managed-datahub/observe/freshness-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/volume-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/custom-sql-assertions", + className: "saasOnly", + }, + { + type: "doc", + id: "docs/managed-datahub/observe/column-assertions", + className: "saasOnly", + }, + ], }, { type: "doc", - id: "docs/managed-datahub/observe/column-assertions", - className: "saasOnly", + id: "docs/managed-datahub/observe/data-contract", }, ], }, @@ -129,11 +144,6 @@ module.exports = { { "GraphQL API": [ "docs/managed-datahub/datahub-api/graphql-api/getting-started", - { - type: "doc", - id: 
"docs/managed-datahub/datahub-api/graphql-api/incidents-api-beta", - className: "saasOnly", - }, ], }, ], @@ -177,6 +187,9 @@ module.exports = { }, { "Managed DataHub Release History": [ + "docs/managed-datahub/release-notes/v_0_2_16", + "docs/managed-datahub/release-notes/v_0_2_15", + "docs/managed-datahub/release-notes/v_0_2_14", "docs/managed-datahub/release-notes/v_0_2_13", "docs/managed-datahub/release-notes/v_0_2_12", "docs/managed-datahub/release-notes/v_0_2_11", @@ -297,10 +310,16 @@ module.exports = { id: "docs/lineage/airflow", label: "Airflow", }, + { + type: "doc", + id: "docs/lineage/dagster", + label: "Dagster", + }, //"docker/airflow/local_airflow", "metadata-integration/java/spark-lineage/README", "metadata-ingestion/integration_docs/great-expectations", "metadata-integration/java/datahub-protobuf/README", + "metadata-integration/java/spark-lineage-beta/README", //"metadata-ingestion/source-docs-template", { type: "autogenerated", @@ -560,9 +579,18 @@ module.exports = { ], }, { - type: "doc", - label: "OpenAPI", - id: "docs/api/openapi/openapi-usage-guide", + OpenAPI: [ + { + type: "doc", + label: "OpenAPI", + id: "docs/api/openapi/openapi-usage-guide", + }, + { + type: "doc", + label: "Structured Properties", + id: "docs/api/openapi/openapi-structured-properties", + }, + ], }, "docs/dev-guides/timeline", { @@ -739,7 +767,11 @@ module.exports = { //"docs/how/build-metadata-service", //"docs/how/graph-onboarding", //"docs/demo/graph-onboarding", + //"metadata-integration/java/spark-lineage/README", + // "metadata-integration/java/spark-lineage-beta/README.md + // "metadata-integration/java/openlineage-converter/README" //"metadata-ingestion-modules/airflow-plugin/README" + //"metadata-ingestion-modules/dagster-plugin/README" // "metadata-ingestion/schedule_docs/datahub", // we can delete this // TODO: change the titles of these, removing the "What is..." 
portion from the sidebar" // "docs/what/entity", @@ -767,6 +799,7 @@ module.exports = { // "docs/how/add-user-data", // "docs/_feature-guide-template" // - "metadata-service/services/README" + // "metadata-ingestion/examples/structured_properties/README" // ], ], }; diff --git a/docs-website/sphinx/Makefile b/docs-website/sphinx/Makefile index c01b45e322c679..e8c419f99132f0 100644 --- a/docs-website/sphinx/Makefile +++ b/docs-website/sphinx/Makefile @@ -18,8 +18,8 @@ venv: $(VENV_SENTINEL) $(VENV_SENTINEL): requirements.txt python3 -m venv $(VENV_DIR) - $(VENV_DIR)/bin/pip install --upgrade pip wheel setuptools - $(VENV_DIR)/bin/pip install -r requirements.txt + $(VENV_DIR)/bin/pip install --upgrade pip uv wheel setuptools + VIRTUAL_ENV=$(VENV_DIR) $(VENV_DIR)/bin/uv pip install -r requirements.txt touch $(VENV_SENTINEL) .PHONY: help html doctest linkcheck clean clean_all serve md diff --git a/docs-website/src/components/MarkpromptHelp/index.jsx b/docs-website/src/components/MarkpromptHelp/index.jsx index ac135379d1e973..91c9afec40c7a5 100644 --- a/docs-website/src/components/MarkpromptHelp/index.jsx +++ b/docs-website/src/components/MarkpromptHelp/index.jsx @@ -1,8 +1,9 @@ import React, { useContext, useEffect, useState } from "react"; import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; import { usePluginData } from "@docusaurus/useGlobalData"; +import "@markprompt/css" +import { Markprompt } from "@markprompt/react"; import styles from "./markprompthelp.module.scss"; -import * as Markprompt from "@markprompt/react"; import { VisuallyHidden } from "@radix-ui/react-visually-hidden"; import { LikeOutlined, DislikeOutlined } from "@ant-design/icons"; @@ -12,206 +13,11 @@ const MarkpromptHelp = () => { const { siteConfig = {} } = context; return ( - - - Ask our AI - - - - - - - - {/* Markprompt.Title is required for accessibility reasons. It can be hidden using an accessible content hiding technique. */} - - Ask me anything about DataHub! 
- - - - - - - - - - -
- This is an experimental AI-powered chat bot. We can't be sure what it will say but hope it can be helpful. If it's not, there are always{" "} - - humans available in our Slack channel. - -
- -
-
-
+ + ); }; -async function sendFeedback(data = {}) { - const response = await fetch("https://hlmqfkovdugtwoddrtkt.supabase.co/rest/v1/queries", { - method: "POST", - headers: { - "Content-Type": "application/json", - Prefer: "return=minimal", - apikey: - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImhsbXFma292ZHVndHdvZGRydGt0Iiwicm9sZSI6ImFub24iLCJpYXQiOjE2ODI5NzQwMjQsImV4cCI6MTk5ODU1MDAyNH0.wNggN7IK43j6KhJmFlELo8HmkTBIQwWt-Rx8vOYXBI8", - }, - redirect: "follow", - referrerPolicy: "no-referrer", - body: JSON.stringify(data), - }); - return response; -} -const Feedback = () => { - const { state, prompt } = useContext(Markprompt.Context); - const [feedbackSubmitted, setFeedbackSubmitted] = useState(false); - - useEffect(() => { - setFeedbackSubmitted(false); - if (state === "preload") { - sendFeedback({ - query: prompt, - }).then((response) => { - console.log(response.ok ? "Logged query" : "Error logging query"); - }); - } - }, [state]); - - const handleClick = (rating) => { - sendFeedback({ - query: prompt, - result_rating: rating, - }).then((response) => { - setFeedbackSubmitted(true); - console.log(response.ok ? "Logged feedback" : "Error logging feedback"); - }); - }; - - return ( -
- {feedbackSubmitted ? ( - <>Thanks for your feedback! - ) : ( - <> - Was this answer helpful?{" "} -
- - -
- - )} -
- ); -}; - -const Caret = () => { - const { answer } = useContext(Markprompt.Context); - - if (answer) { - return null; - } - - return ; -}; - -const getReferenceInfo = (referenceId) => { - const contentDocsData = usePluginData("docusaurus-plugin-content-docs"); - const docs = contentDocsData.versions?.[0]?.docs; - - const lastDotIndex = referenceId.lastIndexOf("."); - if (lastDotIndex !== -1) { - referenceId = referenceId.substring(0, lastDotIndex); - } - const docItem = docs?.find((doc) => doc.id === referenceId); - - const url = docItem?.path; - const title = url?.replace("/docs/generated/", "")?.replace("/docs/", ""); - - return { url, title }; -}; - -const Reference = ({ referenceId, index }) => { - const { title, url } = getReferenceInfo(referenceId); - - if (!url) return null; - - return ( -
  • - - {title || url} - -
  • - ); -}; - -const References = () => { - const { state, references } = useContext(Markprompt.Context); - - if (state === "indeterminate") return null; - - let adjustedState = state; - if (state === "done" && references.length === 0) { - adjustedState = "indeterminate"; - } - - return ( -
    -
    -

    Fetching relevant pages…

    -

    Answer generated from the following sources:

    - -
    - ); -}; - -const SearchIcon = ({ className }) => ( - - - - -); - -const CloseIcon = ({ className }) => ( - - - - -); - -const BotIcon = ({ ...props }) => ( - - - -); export default MarkpromptHelp; diff --git a/docs-website/src/pages/_components/Hero/index.js b/docs-website/src/pages/_components/Hero/index.js index 97a04eb21fa73c..09e4adebf6504b 100644 --- a/docs-website/src/pages/_components/Hero/index.js +++ b/docs-website/src/pages/_components/Hero/index.js @@ -51,7 +51,6 @@ const Hero = ({}) => {
    - DataHub Flow Diagram

    Get Started Now

    Run the following command to get started with DataHub.

    diff --git a/docs-website/src/pages/_components/Section/index.js b/docs-website/src/pages/_components/Section/index.js index 8fb8dc06937cc8..281b8e6928f3ae 100644 --- a/docs-website/src/pages/_components/Section/index.js +++ b/docs-website/src/pages/_components/Section/index.js @@ -17,9 +17,9 @@ const PromoSection = () => ( diff --git a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss index 3d30c65f89539a..862fb04c8370b1 100644 --- a/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss +++ b/docs-website/src/pages/_components/TownhallButton/townhallbutton.module.scss @@ -26,4 +26,4 @@ background-image: linear-gradient(to right, #1890ff 0%, #48DBFB 100%); background-origin: border-box; } - } \ No newline at end of file + } diff --git a/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/championqualitycardssection.module.scss b/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/championqualitycardssection.module.scss new file mode 100644 index 00000000000000..9601938b077091 --- /dev/null +++ b/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/championqualitycardssection.module.scss @@ -0,0 +1,7 @@ +.section { + margin: 2rem 3rem 3rem 3rem; +} + +.card { + margin: 1rem; +} \ No newline at end of file diff --git a/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/index.js b/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/index.js new file mode 100644 index 00000000000000..9caed95973dc00 --- /dev/null +++ b/docs-website/src/pages/champions/_components/ChampionQualityCardsSection/index.js @@ -0,0 +1,44 @@ +import React from "react"; +import styles from "./championqualitycardssection.module.scss"; +import clsx from "clsx"; +import { CodeTwoTone, HeartTwoTone, SoundTwoTone } from "@ant-design/icons"; + +const 
ChampionQualityCardsSection = () => { + return ( +
    +

    Our Champions...

    +
    +
    +
    +

    + +

    +

    Contribute to our code

    +

    Enhance our projects by contributing to our GitHub repositories.

    +
    +
    +
    +
    +

    + +

    +

    Help out the community

    +

    Support our community by actively participating in our Slack channels

    +
    +
    +
    +
    +

    + +

    +

Share the experience

    +

    Inspire others by sharing your adoption story through blogs or town hall sessions.

    +
    +
    +
    +
    + ); + +}; + +export default ChampionQualityCardsSection; \ No newline at end of file diff --git a/docs-website/src/pages/champions/_components/ChampionSection/champion-section.module.scss b/docs-website/src/pages/champions/_components/ChampionSection/champion-section.module.scss new file mode 100644 index 00000000000000..d5f54abcef779a --- /dev/null +++ b/docs-website/src/pages/champions/_components/ChampionSection/champion-section.module.scss @@ -0,0 +1,87 @@ +.section { + display: grid; + grid-template-columns: 1fr; + gap: 3rem; + padding-bottom: 2rem; + margin-bottom: 2rem; + border-bottom: 1px solid var(--ifm-hr-border-color); + :global { + .heading { + font-weight: bold; + text-align: center; + p { + margin-bottom: 0; + line-height: 1.2em; + font-weight: 600 !important; + } + .badge { + margin: 10px auto; + max-width: 300px; + height: auto; + } + @media (min-width: 550px) { + grid-column-start: 1; + grid-column-end: 3; + } + } + } + @media (min-width: 550px) { + grid-template-columns: 1fr 1fr; + } + + @media (min-width: 800px) { + grid-template-columns: 1fr 1fr 1fr; + } + @media (min-width: 1000px) { + grid-template-columns: 1fr 1fr 1fr 1fr; + } +} + +.card { + padding: 2rem; + box-shadow: 0 0 10px 4px rgba(128, 128, 128, 0.1); + border: none; + + &:hover { + transform: scale(1.03); + transition: transform .2s ease; + } + + :global { + .avatar__photo { + padding: 0.5rem; + width: 100%; + max-width: 150px; + height: auto; + margin: 0 auto; + } + .card__header { + text-align: center; + } + .card__body { + text-align: center; + font-size: 13px; + line-height: 16px; + } + .card__footer { + display: flex; + align-items: center; + justify-content: center; + + .logo:hover { + opacity: 0.7; + } + + svg { + width: 26px; + height: 26px; + margin: 0.5rem 0.5rem; + + path { + fill: var(--ifm-font-color-secondary); + } + } + + } + } +} diff --git a/docs-website/src/pages/champions/_components/ChampionSection/index.js 
b/docs-website/src/pages/champions/_components/ChampionSection/index.js new file mode 100644 index 00000000000000..b42caeaa06f45b --- /dev/null +++ b/docs-website/src/pages/champions/_components/ChampionSection/index.js @@ -0,0 +1,68 @@ +import React from "react"; +import clsx from "clsx"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import styles from "./champion-section.module.scss"; +import { TwitterOutlined, LinkedinFilled, GithubFilled, GlobalOutlined } from "@ant-design/icons"; + +const ChampionSection = ({ people }) => { + return ( +
    + {people.map((person, idx) => { + const { name, image, position, bio, social, location } = person; + return ( +
    + {image ? ( + {name} + ) : ( + Placeholder + )} +
    +

    {name}

    +
    {position}
    +
    +
    +

    {bio}

    +
    +
    + {social.linkedin && ( + + + + + )} + {social.github && ( + + + + + + + )} + {social.twitter && ( + + + + + + )} + {social.web && ( + + + + + + )} +
    +
    + ); + })} +
    + ); +}; + +export default ChampionSection; diff --git a/docs-website/src/pages/champions/index.js b/docs-website/src/pages/champions/index.js new file mode 100644 index 00000000000000..41d1e48ef67720 --- /dev/null +++ b/docs-website/src/pages/champions/index.js @@ -0,0 +1,398 @@ +import React from "react"; +import Layout from "@theme/Layout"; +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; +import Link from "@docusaurus/Link"; +import ChampionQualityCardsSection from "./_components/ChampionQualityCardsSection" + +import ChampionSection from "./_components/ChampionSection"; + +const championSections = [ + { + people: [ + { + name: "Siladitya Chakraborty", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/siladitya_chakraborty.jpeg", + position: "Data Engineer, Adevinta", + bio: ( + <> +

+ Submitted 6 pull requests including improvements on graphQL and search API. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/aditya-0bab9a84/", + github: "https://github.com/siladitya2", + }, + location: "Barcelona, Spain" + }, + { + name: "Sergio Gómez Villamor", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/champ-img/imgs/datahub-champions/sergio_gomez_villamor.jpg", + position: "Tech Lead, Adevinta", + bio: ( + <> +

    + Submitted 26 pull requests and raised 4 issues, also featured in "Humans of DataHub." +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/sgomezvillamor/", + github: "https://github.com/sgomezvillamor/", + }, + location: "Barcelona,Spain" + }, + { + name: "Amanda Ng", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/amanda-ng.png", + position: "Lead Software Engineer, Grab", + bio: ( + <> +

    + Submitted 9 pull requests and shared Grab's experience adopting and implementing DataHub during the October 2022 Town Hall. +

    + + ), + social: { + linkedin: "https://sg.linkedin.com/in/amandang19", + github: "https://github.com/ngamanda", + web: "https://ngamanda.com/", + }, + location: "Singapore" + }, + { + name: "Patrick Braz", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/patrick-franco-braz.jpeg", + position: "Data Engineering Specialist, Grupo Boticário", + bio: ( + <> +

    + Submitted 16 pull requests and 3 issues and regularly provided guidance to Community Members in Slack channels. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/patrick-franco-braz/", + github: "https://github.com/PatrickfBraz", + }, + location: "Rio de Janeiro, Brazil" + }, + { + name: "Steve Pham", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/cuong-pham.jpeg", + bio: ( + <> +

    + Submitted 4 pull requests and reliably provided direction to Community Members across all support channels in Slack. +

    + + ), + social: { + }, + }, + { + name: "Piotr Skrydalewicz", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/initials/ps_white.jpg", + position: "Data Engineer", + bio: ( + <> +

    + Contributed 5 commits in 2022 to the main DataHub Project and Helm repositories, including Stateful Ingestion support for Presto-on-Hive. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/skrydal", + }, + location: "Lodz, Poland" + }, + { + name: "Harvey Li", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/initials/hl_black.jpg", + position: "Lead Data Engineer, Grab", + bio: ( + <> +

    + Shared Grab's experience adopting and implementing DataHub during the October 2022 Town Hall and featured in Humans of DataHub. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/li-haihui", + github: "https://github.com/HarveyLeo", + }, + location: "Singapore" + }, + { + name: "Fredrik Sannholm", + position: "Wolt", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/data-practitioners-guild/initials/fs_black.jpg", + position: "", + bio: ( + <> +

    + Drove DataHub adoption at Wolt and featured in Humans of DataHub. +

    + + ), + social: { + }, + location: "Finland" + }, + { + name: "Scott Maciej", + position: "Optum", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/champ-img/imgs/datahub-champions/initials/sm.jpg", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation at Optum. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/scottmaciej/", + github: "https://github.com/sgm44", + web: "https://www.badhabitbeer.com/", + }, + location: "USA" + }, + { + name: "Tim Bossenmaier", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/champ-img/imgs/datahub-champions/tim_bossenmaier.jpeg", + position: "Data & Software Engineer, Bytefabrik.AI", + bio: ( + <> +

    + Reliably provides direction to community members and submitted 5 pull request, including improvements to Athena ingestion (support for nested schemas) and the REST emitter. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/tim-bossenmaier/", + github: "https://github.com/bossenti", + }, + location: "Sigmaringen, Germany" + }, + { + name: "Nikola Kasev", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/champ-img/imgs/datahub-champions/nikola_kasev.jpeg", + position: "Data Engineer, KPN", + bio: ( + <> +

    + Reliably provided direction to Community Members across all support channels in Slack. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/nikolakasev", + github: "https://github.com/nikolakasev", + }, + location: "Haarlem, Noord-holland" + }, + { + name: "Nidhin Nandhakumar", + position: "Coursera", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/nn.jpg", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation on Coursera. +

    + + ), + social: { + }, + }, + { + name: "Wu Teng", + position: "CashApp", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/wt.jpg", + bio: ( + <> +

    + Reliably provided direction to Community Members across all support channels in Slack. +

    + + ), + social: { + }, + location: "Australia" + }, + { + name: "Felipe Gusmao", + position: "Zynga", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/fg.jpg", + bio: ( + <> +

    + Shared Zynga's experience adopting and implementing DataHub during the September 2023 Town Hall. +

    + + ), + social: { + }, + location: "Toronto, Canada" + }, + { + name: "Sudhakara ST", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/ss.jpg", + position: "Engineer, Zynga", + bio: ( + <> +

    + Reliably provided direction to Community Members across all support channels in Slack and shared Zynga's experience adopting and implementing DataHub during the September 2023 Town Hall. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/sudhakara-st/", + }, + location: "Bengaluru, India" + }, + { + name: "Bobbie-Jean Nowak", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/bn.jpg", + position: "Technical Product Manager, Optum ", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation at Optum. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/bobbie-jean-nowak-a0076b77/", + }, + location: "Minnesota, USA" + }, + { + name: "Dima Korendovych", + position: "Optum", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/dk.jpg", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation at Optum. +

    + + ), + social: { + }, + location: "USA" + }, + { + name: "Tim Drahn", + position: "Optum", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/td.jpg", + position: "Solution Architect, Optum Technologies", + bio: ( + <> +

    + Submitted 2 pull requests and 1 issue while reliably providing direction to Community Members across all support channels in Slack. +

    + + ), + social: { + linkedin: "https://www.linkedin.com/in/tim-drahn-a873532b/", + github: "https://github.com/tkdrahn", + }, + location: "MA, USA" + }, + { + name: "Kate Koy", + position: "Optum", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/kk.jpg", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation at Optum. +

    + + ), + social: { + }, + location: "USA" + }, + { + name: "Anjali Arora", + position: "Optum", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/aa.jpg", + bio: ( + <> +

    + Drove DataHub's adaptation and implementation at Optum. +

    + + ), + social: { + }, + location: "USA" + }, + { + name: "Raj Tekal", + image: "https://raw.githubusercontent.com/datahub-project/static-assets/main/imgs/datahub-champions/initials/rt.jpg", + position: "Lead Software Engineer, Optum Technologies", + bio: ( + <> +

    + Submitted 4 pull requests. +

    + + ), + social: { + }, + location: "PA, USA" + }, + ], + }, +]; + +const HeroImage = (props) => { + return ( + <> + DataHub Champions + + ); +}; + +function Champion() { + const context = useDocusaurusContext(); + const { siteConfig = {} } = context; + + return ( + +
    +
    +
    +
    + +

    DataHub Champions

    +

    + Recognizing community members who have made exceptional contributions to further the collective success of DataHub. +

    + +
    +

    Meet Our Champions

    +
    + + See Data Practitioner Guild (2022) → + +
    +
    + + {championSections.map((section, idx) => ( + + ))} +
    +
    +
    + ); +} + +export default Champion; diff --git a/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss b/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss index 61739d5b6922cb..69558d986ada9b 100644 --- a/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss +++ b/docs-website/src/pages/docs/_components/FeatureCard/featurecard.module.scss @@ -1,11 +1,15 @@ +@media (min-width: 997px) and (max-width: 1465px) { + .feature { + min-height: 20rem !important; + max-height: 30rem !important; + } +} .feature { flex-direction: row; - padding: 1.75rem; color: var(--ifm-hero-text-color); margin: 0rem 2rem 1rem 0rem; - min-height: 14rem; - max-height: 15rem; - overflow: hidden; + min-height: 15rem; + max-height: 15rem; text-decoration: none !important; img { @@ -36,3 +40,4 @@ border-color: var(--ifm-color-primary); } } + diff --git a/docs-website/src/pages/docs/_components/FeatureCard/index.jsx b/docs-website/src/pages/docs/_components/FeatureCard/index.jsx index 407e8eb4019878..8fb24493e50e9a 100644 --- a/docs-website/src/pages/docs/_components/FeatureCard/index.jsx +++ b/docs-website/src/pages/docs/_components/FeatureCard/index.jsx @@ -8,7 +8,7 @@ const FeatureCard = ({icon, title, description, to}) => { return (
    -
    +
    {icon} {title} → {description} diff --git a/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx b/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx index b4e3895fa40e77..d23901506dcce3 100644 --- a/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx +++ b/docs-website/src/pages/docs/_components/QuickstartCard/index.jsx @@ -9,9 +9,13 @@ const QuickstartCard = ({ icon, title, to, color, fontColor }) => { return (
    - -
    - {title} → +
    + +
    +
    + {title} → +
    +
    diff --git a/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss b/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss index fd35a4b777c99c..70515919060e64 100644 --- a/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss +++ b/docs-website/src/pages/docs/_components/QuickstartCard/quickstartcard.module.scss @@ -2,47 +2,34 @@ flex-direction: row; height: 10rem; flex-shrink: 0; - padding: 3rem; color: var(--ifm-text-color); margin: 0rem 2rem 1rem 0rem; min-height: calc(100% - 1rem); text-decoration: none !important; - img { width: 3rem; height: 3rem; - margin: auto 1rem; + margin: auto; } svg { width: 1.5rem; height: 1.5rem; margin-right: 0.75rem; } - strong, - span { - display: block; - margin-bottom: 0.25rem; - } - strong { - font-weight: 600; - padding: auto 0; - } - span { - font-size: 0.875rem; - line-height: 1.25em; - } &:hover { border-color: var(--ifm-color-primary); } - .quickstart-text { - margin: auto 0; - } - } -.quickstart-text { - margin: auto 0; +.card_content { + display: flex; + margin: 0 auto; +} + +.card_title { + padding-left: 1rem; + font-weight: 600; } \ No newline at end of file diff --git a/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss b/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss index 4fbbc4583d6629..833ec97b15ca32 100644 --- a/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss +++ b/docs-website/src/pages/docs/_components/QuickstartCards/quickstartcards.module.scss @@ -15,12 +15,9 @@ height: 1.5rem; margin-right: 0.75rem; } - strong, - span { + strong { display: block; margin-bottom: 0.25rem; - } - strong { font-weight: 600; } diff --git a/docs-website/src/pages/docs/index.js b/docs-website/src/pages/docs/index.js index 0edd07267b27ec..3f123e7b488bae 100644 --- a/docs-website/src/pages/docs/index.js +++ 
b/docs-website/src/pages/docs/index.js @@ -1,288 +1,8 @@ -import React from "react"; -import Layout from "@theme/Layout"; -import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; -import SearchBar from "./_components/SearchBar"; -import QuickLinkCards from "./_components/QuickLinkCards"; -import GuideList from "./_components/GuideList"; +import React from 'react'; +import { Redirect } from '@docusaurus/router'; -import { - FolderTwoTone, - BookTwoTone, - TagsTwoTone, - ApiTwoTone, - SearchOutlined, - CompassTwoTone, - NodeExpandOutlined, - CheckCircleTwoTone, - SafetyCertificateTwoTone, - LockTwoTone, - SlackOutlined, - HistoryOutlined, - InteractionOutlined, - GlobalOutlined, - FileTextOutlined, -} from "@ant-design/icons"; +const Home = () => { + return ; +}; -//quickLinkCards -import { - ThunderboltTwoTone, - DeploymentUnitOutlined, - SyncOutlined, - CodeTwoTone, - QuestionCircleTwoTone, - SlidersTwoTone, - HeartTwoTone, -} from "@ant-design/icons"; - -const deploymentGuideContent = [ - { - title: "Managed DataHub", - platformIcon: "acryl", - to: "docs/saas", - }, - { - title: "Docker", - platformIcon: "docker", - to: "docs/docker", - }, - // { - // title: "AWS ECS", - // platformIcon: "amazon-ecs", - // to: "docs/deploy/aws", - // }, - { - title: "AWS", - platformIcon: "amazon-eks", - to: "docs/deploy/aws", - }, - { - title: "GCP", - platformIcon: "google-cloud", - to: "docs/deploy/gcp", - }, -]; - -const ingestionGuideContent = [ - { - title: "Snowflake", - platformIcon: "snowflake", - to: "docs/generated/ingestion/sources/snowflake", - }, - { - title: "Looker", - platformIcon: "looker", - to: "docs/generated/ingestion/sources/looker", - }, - { - title: "Redshift", - platformIcon: "redshift", - to: "docs/generated/ingestion/sources/redshift", - }, - { - title: "Hive", - platformIcon: "hive", - to: "docs/generated/ingestion/sources/hive", - }, - { - title: "BigQuery", - platformIcon: "bigquery", - to: "docs/generated/ingestion/sources/bigquery", - 
}, - { - title: "dbt", - platformIcon: "dbt", - to: "docs/generated/ingestion/sources/dbt", - }, - { - title: "Athena", - platformIcon: "athena", - to: "docs/generated/ingestion/sources/athena", - }, - { - title: "PostgreSQL", - platformIcon: "postgres", - to: "docs/generated/ingestion/sources/postgres", - }, -]; - -const featureGuideContent = [ - { title: "Domains", icon: , to: "docs/domains" }, - { - title: "Glossary Terms", - icon: , - to: "docs/glossary/business-glossary", - }, - { title: "Tags", icon: , to: "docs/tags" }, - { - title: "Ingestion", - icon: , - to: "docs/ui-ingestion", - }, - { title: "Search", icon: , to: "docs/how/search" }, - // { title: "Browse", icon: , to: "/docs/quickstart" }, - { - title: "Lineage Impact Analysis", - icon: , - to: "docs/act-on-metadata/impact-analysis", - }, - { - title: "Metadata Tests", - icon: , - to: "docs/tests/metadata-tests", - }, - { - title: "Approval Flows", - icon: , - to: "docs/managed-datahub/approval-workflows", - }, - { - title: "Personal Access Tokens", - icon: , - to: "docs/authentication/personal-access-tokens", - }, - { - title: "Slack Notifications", - icon: , - to: "docs/managed-datahub/saas-slack-setup", - }, - { - title: "Schema History", - icon: , - to: "docs/schema-history", - }, -]; - -const quickLinkContent = [ - { - title: "Get Started", - icon: , - description: "Details on how to get DataHub up and running", - to: "/docs/quickstart", - }, - { - title: "Ingest Metadata", - icon: , - description: "Details on how to get Metadata loaded into DataHub", - to: "/docs/metadata-ingestion", - }, - { - title: "API", - icon: , - description: "Details on how to utilize Metadata programmatically", - to: "docs/api/datahub-apis", - }, - { - title: "Act on Metadata", - icon: , - description: "Step-by-step guides for acting on Metadata Events", - to: "docs/act-on-metadata", - }, - { - title: "Developer Guides", - icon: , - description: "Interact with DataHub programmatically", - to: "/docs/api/datahub-apis", - 
}, - { - title: "Feature Guides", - icon: , - description: "Step-by-step guides for making the most of DataHub", - to: "/docs/how/search", - }, - { - title: "Deployment Guides", - icon: , - description: "Step-by-step guides for deploying DataHub to production", - to: "/docs/deploy/aws", - }, - { - title: "Join the Community", - icon: , - description: "Collaborate, learn, and grow with us", - to: "/docs/slack", - }, -]; - -const gitLinkContent = [ - { - title: "datahub", - icon: , - to: "https://github.com/datahub-project/datahub", - }, - { - title: "datahub-actions", - icon: , - to: "https://github.com/acryldata/datahub-actions", - }, - { - title: "datahub-helm", - icon: , - to: "https://github.com/acryldata/datahub-helm", - }, - { - title: "meta-world", - icon: , - to: "https://github.com/acryldata/meta-world", - }, - { - title: "business-glossary-sync-action", - icon: , - to: "https://github.com/acryldata/business-glossary-sync-action", - }, - { - title: "dbt-impact-action", - icon: , - to: "https://github.com/acryldata/dbt-impact-action", - }, -]; - -function Docs() { - const context = useDocusaurusContext(); - const { siteConfig = {} } = context; - - return ( - -
    -
    -
    -
    -

    Documentation

    -

    - Guides and tutorials for everything DataHub. -

    - -
    -
    - - - - - -
    -
    -
    - ); -} - -export default Docs; +export default Home; \ No newline at end of file diff --git a/smoke-test/tests/managed-ingestion/__init__.py b/docs-website/src/pages/events/events.module.scss similarity index 100% rename from smoke-test/tests/managed-ingestion/__init__.py rename to docs-website/src/pages/events/events.module.scss diff --git a/docs-website/src/pages/events/index.js b/docs-website/src/pages/events/index.js new file mode 100644 index 00000000000000..40183c8294e446 --- /dev/null +++ b/docs-website/src/pages/events/index.js @@ -0,0 +1,38 @@ +import React from 'react'; +import Layout from '@theme/Layout'; +import useDocusaurusContext from '@docusaurus/useDocusaurusContext'; + +function Events() { + const { siteConfig = {} } = useDocusaurusContext(); + + return ( + +
    +
    +
    +

    DataHub Community Event Calendar

    +
    Join our monthly events to network and learn more about our community!
    +
    + +
    +
    +
    +
    +
    + ); +} + +export default Events; diff --git a/docs-website/src/pages/guild/index.js b/docs-website/src/pages/guild/index.js index 84470ef9445e36..c66f519128121c 100644 --- a/docs-website/src/pages/guild/index.js +++ b/docs-website/src/pages/guild/index.js @@ -2,6 +2,7 @@ import React from "react"; import Layout from "@theme/Layout"; import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; import { useColorMode } from "@docusaurus/theme-common"; +import Link from "@docusaurus/Link"; import GuildSection from "./_components/GuildSection"; @@ -801,6 +802,11 @@ function Guild() { Celebrating community members that have gone above and beyond to contribute to the collective success of DataHub

    +
    + + See the DataHub Champions (2023) → + +
    {guildSections.map((section, idx) => ( diff --git a/docs-website/src/pages/hack_display_image.tsx b/docs-website/src/pages/hack_display_image.tsx new file mode 100644 index 00000000000000..ef4faae504fb4f --- /dev/null +++ b/docs-website/src/pages/hack_display_image.tsx @@ -0,0 +1,32 @@ +import React from "react"; +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; +import Layout from "@theme/Layout"; +import Image from "@theme/IdealImage"; + +function DisplayImageHack() { + // Needed because the datahub-web-react app used to directly link to this image. + // See https://github.com/datahub-project/datahub/pull/9785 + + const context = useDocusaurusContext(); + const { siteConfig } = context; + + return ( + + DataHub Flow Diagram + DataHub Flow Diagram + + ); +} + +export default DisplayImageHack; diff --git a/docs-website/src/pages/index.js b/docs-website/src/pages/index.js index af5774749609c9..07345ac7100fa2 100644 --- a/docs-website/src/pages/index.js +++ b/docs-website/src/pages/index.js @@ -39,6 +39,45 @@ function Home() { > +
    +
    +
    +
    +
    +