docs(fix): skip doctest for SqlString return #1331

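For context on the PR title: Python's doctest module supports a per-example `# doctest: +SKIP` directive, which keeps an example visible in rendered docs while excluding it from execution. An example whose return value is a SqlString prints backend- and version-dependent SQL, so skipping it avoids spurious failures in the doctest job of the `Docs main` workflow reproduced below. The snippet is only an illustration of the mechanism, with a placeholder function name, not the actual docstring touched by this PR:

def compile_expression(expr):
    """Compile an expression to SQL.

    Examples
    --------
    >>> compile_expression(expr)  # doctest: +SKIP
    SELECT
      SUM("x") AS "total"
    FROM "t"
    """
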
name: Docs main
on:
  push:
    branches:
      - main
  merge_group:
# only a single docs job that pushes to `main` can run at any given time
concurrency: docs-${{ github.repository }}
permissions:
  # increase the rate limit for github operations, but limit token permissions
  # to read-only
  contents: read
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: install nix
        uses: cachix/install-nix-action@v30
        with:
          extra_nix_config: |
            access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
      - name: setup cachix
        uses: cachix/cachix-action@v15
        with:
          name: ibis
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
          extraPullNames: nix-community
      - name: checkout
        uses: actions/checkout@v4
      - name: run doctests
        # keep HOME because duckdb (which we use for doctests) wants to use
        # that for extensions
        run: nix develop '.#ibis311' --ignore-environment --keep HOME --keep HYPOTHESIS_PROFILE -c just doctest
  build:
    runs-on: ubuntu-latest
    steps:
      - name: install nix
        uses: cachix/install-nix-action@v30
        with:
          extra_nix_config: |
            access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
      - name: setup cachix
        uses: cachix/cachix-action@v15
        with:
          name: ibis
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
          extraPullNames: nix-community
      - name: checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: restore cache of the previously rendered notebooks
        uses: actions/cache/restore@v4
        with:
          # https://docs.github.com/en/webhooks/webhook-events-and-payloads#push
          # > The SHA of the most recent commit on ref before the push.
          key: docs-${{ github.event.before }}
          path: docs/**/.jupyter_cache
      - name: run quarto check
        run: nix develop '.#ibis311' --ignore-environment --keep HOME -c quarto check
      - name: build api docs
        run: nix develop '.#ibis311' --ignore-environment -c just docs-apigen --verbose
      - name: build docs
        run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just docs-render
      - name: cache rendered notebooks
        uses: actions/cache/save@v4
        with:
          key: docs-${{ github.sha }}
          path: docs/**/.jupyter_cache
      - name: build jupyterlite
        run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just build-jupyterlite
      - name: check that all frozen computations were done before push
        run: git diff --exit-code --stat
      - name: verify internal links
        run: nix develop --ignore-environment '.#links' -c just checklinks --offline --no-progress
      - name: deploy docs
        run: nix develop --ignore-environment --keep NETLIFY_AUTH_TOKEN -c just docs-deploy
        env:
          NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
      # Upload index related
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install Algolia API Dependencies
        run: |
          python -m pip install --upgrade 'algoliasearch<4'
      - name: Create and Upload Base Index
        run: |
          python .github/workflows/algolia/upload-algolia.py
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}
          ALGOLIA_APP_ID: TNU9HG3L41
          ALGOLIA_INDEX: prod_ibis
      - name: Create and Upload API Records to index
        run: |
          python .github/workflows/algolia/upload-algolia-api.py
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}
          ALGOLIA_APP_ID: TNU9HG3L41
          ALGOLIA_INDEX: prod_ibis
      - name: Configure custom ranking on Algolia
        run: |
          python .github/workflows/algolia/configure-algolia-api.py
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}
          ALGOLIA_APP_ID: TNU9HG3L41
          ALGOLIA_INDEX: prod_ibis
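
A note on the `--keep HOME` flags above: the doctests run against DuckDB, which by default downloads and caches its extensions under the user's home directory (`~/.duckdb/extensions/...`), so dropping HOME from the otherwise pure Nix shell would leave DuckDB without a writable extension directory. Below is a minimal sketch of the behaviour the comment in the `run doctests` step refers to; the extension name is just an example:

import duckdb

# DuckDB resolves its extension cache relative to HOME by default
# (~/.duckdb/extensions/<version>/<platform>), so the CI shell must keep HOME
# pointing at a writable location for installs like this to succeed.
con = duckdb.connect()
con.install_extension("httpfs")  # downloads into the HOME-based extension dir
con.load_extension("httpfs")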
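
The three Algolia steps call scripts that live outside this file. With the pinned `algoliasearch<4` client, such scripts are typically shaped like the sketch below: read the app ID, write key, and index name from the step's `env:` block, upload records, then adjust index settings. This is an assumed shape only; the record fields and ranking attribute are placeholders, not the contents of the actual scripts under `.github/workflows/algolia/`:

import os

from algoliasearch.search_client import SearchClient  # v3-style client, i.e. algoliasearch<4

# Credentials and the target index come from the step-level `env:` block.
client = SearchClient.create(
    os.environ["ALGOLIA_APP_ID"], os.environ["ALGOLIA_WRITE_API_KEY"]
)
index = client.init_index(os.environ["ALGOLIA_INDEX"])

# Placeholder records; the real scripts would build these from the rendered
# docs and the generated API reference.
records = [
    {
        "objectID": "reference/expressions",
        "title": "Expression API",
        "url": "https://ibis-project.org/reference/expressions",
    },
]
index.save_objects(records)

# Rough analogue of the "Configure custom ranking on Algolia" step; the
# ranking attribute is illustrative, not the project's actual setting.
index.set_settings({"customRanking": ["desc(popularity)"]})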