# Python Presto/Hive unit tests
name: Python Presto/Hive
on:
  push:
    branches:
      - "master"
      - "[0-9].[0-9]*"
  pull_request:
    types: [synchronize, opened, reopened, ready_for_review]
# cancel previous workflow jobs for PRs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true
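# For push builds there is no pull_request number, so the group falls back to the
# unique run_id and pushes never cancel each other; pushes to the same PR share a
# group and cancel the in-progress run.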
jobs:
  test-postgres-presto:
    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
      REDIS_PORT: 16379
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
      SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default
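      # Metadata lives in the PostgreSQL service container below; example/chart
      # data queries go to Presto's in-memory catalog. The SUPERSET__-prefixed
      # variables are assumed to be picked up by the test config module above.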
    services:
      postgres:
        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          # Use custom ports for services to avoid accidentally connecting to
          # GitHub action runner's default installations
          - 15432:5432
      presto:
        image: starburstdata/presto:350-e.6
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          # Use custom ports for services to avoid accidentally connecting to
          # GitHub action runner's default installations
          - 15433:8080
      redis:
        image: redis:7-alpine
        ports:
          - 16379:6379
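    # To reproduce these service containers locally (illustrative only, not part
    # of the workflow), something like the following should work:
    #   docker run -d -p 15432:5432 -e POSTGRES_USER=superset -e POSTGRES_PASSWORD=superset postgres:15-alpine
    #   docker run -d -p 15433:8080 starburstdata/presto:350-e.6
    #   docker run -d -p 16379:6379 redis:7-alpine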
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          submodules: recursive
      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
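      # The change-detector action sets outputs such as steps.check.outputs.python,
      # which the `if:` conditions below use to skip backend steps when no Python
      # files changed.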
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
        if: steps.check.outputs.python == 'true'
      - name: Setup Postgres
        if: steps.check.outputs.python
        uses: ./.github/actions/cached-dependencies
        with:
          run: |
            echo "${{ steps.check.outputs.python }}"
            setup-postgres
      - name: Start Celery worker
        if: steps.check.outputs.python
        uses: ./.github/actions/cached-dependencies
        with:
          run: celery-worker
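      # `celery-worker` is assumed to start a background Celery worker pointed at
      # the Redis service above (REDIS_PORT) for tests that exercise async tasks.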
      - name: Python unit tests (Presto)
        if: steps.check.outputs.python
        run: |
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
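      # The -m expression runs only tests carrying the chart_data_flow or
      # sql_json_flow pytest markers; roughly equivalent to an illustrative
      #   pytest -m 'chart_data_flow or sql_json_flow' tests/integration_tests
      # run locally (the script additionally wires up coverage and other flags).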
      - name: Upload code coverage
        uses: codecov/codecov-action@v4
        with:
          flags: python,presto
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
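      # The codecov flags tag this upload so Presto-suite coverage can be tracked
      # separately from the other Python test workflows.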
  test-postgres-hive:
    runs-on: ubuntu-22.04
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.integration_tests.superset_test_config
      REDIS_PORT: 16379
      SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
      SUPERSET__SQLALCHEMY_EXAMPLES_URI: hive://localhost:10000/default
      UPLOAD_FOLDER: /tmp/.superset/uploads/
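      # Same shape as the Presto job: PostgreSQL holds Superset's metadata, while
      # example data is queried from Hive (HiveServer2 on localhost:10000, started
      # with docker compose below). UPLOAD_FOLDER backs the CSV upload tests.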
    services:
      postgres:
        image: postgres:15-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          # Use custom ports for services to avoid accidentally connecting to
          # GitHub action runner's default installations
          - 15432:5432
      redis:
        image: redis:7-alpine
        ports:
          - 16379:6379
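    # Note: there is no Hive service container; the Hadoop/Hive stack is started
    # with docker compose in a dedicated step below, presumably because it needs
    # more than a single container.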
    steps:
      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
          submodules: recursive
      - name: Check for file changes
        id: check
        uses: ./.github/actions/change-detector/
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Create csv upload directory
        if: steps.check.outputs.python
        run: sudo mkdir -p /tmp/.superset/uploads
      - name: Give write access to the csv upload directory
        if: steps.check.outputs.python
        run: sudo chown -R $USER:$USER /tmp/.superset
      - name: Start hadoop and hive
        if: steps.check.outputs.python
        run: docker compose -f scripts/databases/hive/docker-compose.yml up -d
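      # The compose file is expected to expose HiveServer2 on localhost:10000,
      # matching SUPERSET__SQLALCHEMY_EXAMPLES_URI above. To inspect the stack
      # locally (illustrative):
      #   docker compose -f scripts/databases/hive/docker-compose.yml ps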
      - name: Setup Python
        uses: ./.github/actions/setup-backend/
        if: steps.check.outputs.python
      - name: Setup Postgres
        if: steps.check.outputs.python
        uses: ./.github/actions/cached-dependencies
        with:
          run: setup-postgres
      - name: Start Celery worker
        if: steps.check.outputs.python
        uses: ./.github/actions/cached-dependencies
        with:
          run: celery-worker
      - name: Python unit tests (Hive)
        if: steps.check.outputs.python
        run: |
          pip install -e .[hive]
          ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow'
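      # `pip install -e .[hive]` installs Superset's optional Hive dependencies
      # (e.g. the PyHive driver) so SQLAlchemy can reach HiveServer2; the same
      # marker expression as the Presto job then runs against Hive.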
      - name: Upload code coverage
        uses: codecov/codecov-action@v4
        with:
          flags: python,hive
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true