
Support SparkIntegration activation after SparkContext created #9876
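This change lets the Spark integration be enabled even when a `SparkContext` already exists, instead of requiring `sentry_sdk.init()` to run first. A minimal sketch of the usage the title describes (the DSN and Spark settings below are placeholders, not values from this PR):

```python
import sentry_sdk
from sentry_sdk.integrations.spark import SparkIntegration
from pyspark import SparkContext

# A SparkContext is created before the SDK is initialized...
sc = SparkContext(master="local[*]", appName="example")

# ...and with this change, activating SparkIntegration afterwards still works.
sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[SparkIntegration()],
)
```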

Workflow file for this run

name: CI
on:
  push:
    branches:
      - master
      - release/**
      - sentry-sdk-2.0
  pull_request:
permissions:
  contents: read
env:
  BUILD_CACHE_KEY: ${{ github.sha }}
  CACHED_BUILD_PATHS: |
    ${{ github.workspace }}/dist-serverless
jobs:
  lint:
    name: Lint Sources
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          pip install tox
          tox -e linters
  check-ci-config:
    name: Check CI config
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          pip install jinja2
          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes
  build_lambda_layer:
    name: Build Package
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - name: Setup build cache
        uses: actions/cache@v4
        id: build_cache
        with:
          path: ${{ env.CACHED_BUILD_PATHS }}
          key: ${{ env.BUILD_CACHE_KEY }}
      - name: Build Packages
        run: |
          echo "Creating directory containing Python SDK Lambda Layer"
          # This also triggers "make dist", which creates the Python packages
          make aws-lambda-layer
      - name: Upload Python Packages
        uses: actions/upload-artifact@v3
        with:
          name: ${{ github.sha }}
          path: |
            dist/*
  docs:
    name: Build SDK API Doc
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: actions/setup-python@v5
        with:
          python-version: 3.12
      - run: |
          make apidocs
          cd docs/_build && zip -r gh-pages ./
      - uses: actions/upload-artifact@v3.1.1
        with:
          name: ${{ github.sha }}
          path: docs/_build/gh-pages.zip