name: lakeFS HadoopFS Compatibility Tests
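
# Compatibility test matrix for the lakeFS Hadoop FileSystem ("thick") client:
#   - compatibility-checks-client runs the client built from this commit
#     against previously released lakeFS server versions.
#   - compatibility-checks-server runs the lakeFS server built from this
#     commit against previously released Hadoop client versions.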

on:
  push:
    branches:
      - master
  workflow_dispatch: {}

# These permissions are needed to interact with GitHub's OIDC Token endpoint.
permissions:
  id-token: write
  contents: read

jobs:
  gen-code:
    name: Generate code from latest lakeFS app
    runs-on: ubuntu-22.04
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.22.6"
        id: go
      - uses: actions/setup-node@v4
        with:
          node-version: "18.17.0"
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      # The Generate code step below references steps.version, so the tag is
      # extracted here as well (the same step as in the deploy-image job).
      - name: Extract version
        shell: bash
        run: echo "tag=sha-$(git rev-parse --short HEAD | sed s/^v//g)" >> $GITHUB_OUTPUT
        id: version
      - name: Generate code
        run: |
          make -j3 gen-api gen-code VERSION=${{ steps.version.outputs.tag }}
          mkdir webui/dist
          touch webui/dist/index.html
          tar -czf /tmp/generated.tar.gz .
      - name: Store generated code
        uses: actions/upload-artifact@v3
        with:
          name: generated-code
          path: /tmp/generated.tar.gz
      - name: Build Spark direct-access client
        working-directory: clients/hadoopfs
        run: |
          mvn -Passembly -DfinalName=client -DskipTests --batch-mode --update-snapshots package
      - name: Store client assembly
        uses: actions/upload-artifact@v3
        with:
          name: client-assembly
          path: clients/hadoopfs/target/client.jar
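
  # Build the lakeFS image once and pass it along as an artifact, so the
  # compatibility-checks-server matrix below can load it instead of rebuilding.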
  deploy-image:
    name: Build and cache Docker image
    needs: [gen-code]
    runs-on: ubuntu-22.04
    outputs:
      tag: ${{ steps.version.outputs.tag }}
      image_id: ${{ steps.build_export.outputs.ImageID }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        with:
          node-version: "18.17.0"
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.22.6"
        id: go
      - name: Retrieve generated code
        uses: actions/download-artifact@v3
        with:
          name: generated-code
          path: /tmp/
      - name: Unpack generated code
        run: tar -xzvf /tmp/generated.tar.gz
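      # Derive an image tag such as "sha-1a2b3c4" from the current commit.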
      - name: Extract version
        shell: bash
        run: echo "tag=sha-$(git rev-parse --short HEAD | sed s/^v//g)" >> $GITHUB_OUTPUT
        id: version
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and export
        uses: docker/build-push-action@v5
        id: build_export
        with:
          context: .
          file: ./Dockerfile
          tags: treeverse/lakefs:${{ steps.version.outputs.tag }}
          outputs: type=docker,dest=/tmp/lakefs.tar
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: lakefs-image
          path: /tmp/lakefs.tar

  compatibility-checks-client:
    name: Test lakeFS Hadoop FileSystem compatibility
    needs: [gen-code]
    strategy:
      fail-fast: false
      matrix:
        # Removing a version from this list means the published client is no
        # longer tested for compatibility with that lakeFS version. Like any
        # other client, it can still rely on the lakeFS 1.0 API guarantees.
        lakefs_version:
          - 0.113.0
          - 1.0.0
          - 1.1.0
          - 1.2.0
          - 1.3.1
          - 1.4.0
          - 1.5.0
          - 1.6.0
          - 1.7.0
          - 1.8.0
          - 1.9.1
          - 1.10.0
          - 1.11.1
          - 1.12.1
          - 1.13.0
          - 1.14.1
          - 1.15.0
          - 1.16.0
          - 1.17.0
          - 1.18.0
          - 1.19.0
          - 1.20.0
          - 1.21.0
          - 1.22.0
          - 1.23.0
          - 1.24.0
          - 1.25.0
          - 1.26.1
          - 1.27.0
          - 1.28.2
          - 1.29.0
          - 1.30.0
          - 1.31.1
          - 1.32.1
          - 1.33.0
          - 1.34.0
          - 1.35.0
          - 1.36.0
          - 1.37.0
          - 1.38.0
          - 1.39.2
          - 1.40.0
          - 1.41.0
          - 1.42.0
          - 1.43.0
          - 1.44.0
          - 1.45.0
    runs-on: ubuntu-22.04
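    # TAG selects the released lakeFS server image (treeverse/lakefs:<version>)
    # that docker compose runs for this matrix entry.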
    env:
      TAG: ${{ matrix.lakefs_version }}
      REPO: treeverse
      SPARK_TAG: 3
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      - name: Package Spark App
        working-directory: test/spark/app
        run: sbt sonnets-311/package
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'
      - run: pip install -r ./test/spark/requirements.txt
      - name: Generate uniquifying value
        id: unique
        shell: bash
        run: echo "value=$(cat /proc/sys/kernel/random/uuid)" >> $GITHUB_OUTPUT
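      # The UUID keeps the S3 storage namespace unique, so concurrent or
      # re-run jobs cannot collide on the same path.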
      - name: Start lakeFS for Spark tests
        uses: ./.github/actions/bootstrap-test-lakefs
        with:
          compose-directory: test/spark
        env:
          REPO: treeverse
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          LAKEFS_DATABASE_TYPE: postgres
          LAKEFS_BLOCKSTORE_TYPE: s3
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
      - name: Retrieve client
        uses: actions/download-artifact@v3
        with:
          name: client-assembly
          path: clients/hadoopfs/target
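      # Run the Spark job through the client.jar built by gen-code against the
      # lakeFS server version for this matrix entry.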
      - name: Test lakeFS S3 with Spark 3.x thick client
        timeout-minutes: 8
        working-directory: test/spark
        run: |
          python ./run-test.py \
            --storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
            --repository thick-client-test \
            --sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
            --access_mode hadoopfs \
            --aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
            --aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
      - name: Print lakeFS logs on Spark client failure
        if: ${{ failure() }}
        continue-on-error: true
        working-directory: test/spark
        run: docker compose logs --tail=15000 lakefs

  compatibility-checks-server:
    name: Test lakeFS server FileSystem compatibility
    needs: [deploy-image]
    strategy:
      fail-fast: false
      matrix:
        # Removing a version from this list means the current lakeFS is no
        # longer tested for compatibility with that Hadoop lakeFS client version.
        client_version: [0.1.10, 0.1.11, 0.1.12, 0.1.13, 0.1.14, 0.1.15, 0.2.1]
    runs-on: ubuntu-22.04
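    # TAG and IMAGE_ID identify the image produced by deploy-image;
    # CLIENT_VERSION selects the released Hadoop client under test.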
    env:
      CLIENT_VERSION: ${{ matrix.client_version }}
      TAG: ${{ needs.deploy-image.outputs.tag }}
      IMAGE_ID: ${{ needs.deploy-image.outputs.image_id }}
      REPO: treeverse
      SPARK_TAG: 3
    steps:
      - name: Check out code
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: lakefs-image
          path: /tmp
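      # Load the image built by deploy-image so docker compose runs this
      # commit's lakeFS (treeverse/lakefs:<tag>) rather than a published release.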
      - name: Load Docker image
        run: docker load --input /tmp/lakefs.tar
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      - name: Package Spark App
        working-directory: test/spark/app
        run: sbt sonnets-311/package
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'
      - run: pip install -r ./test/spark/requirements.txt
      - name: Generate uniquifying value
        id: unique
        shell: bash
        run: echo "value=$(cat /proc/sys/kernel/random/uuid)" >> $GITHUB_OUTPUT
      - name: Start lakeFS for Spark tests
        uses: ./.github/actions/bootstrap-test-lakefs
        with:
          compose-directory: test/spark
        env:
          REPO: treeverse
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          LAKEFS_DATABASE_TYPE: postgres
          LAKEFS_BLOCKSTORE_TYPE: s3
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
      - name: Test lakeFS S3 with Spark 3.x thick client
        timeout-minutes: 8
        working-directory: test/spark
        run: |
          python ./run-test.py \
            --storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
            --repository thick-client-test \
            --sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
            --access_mode hadoopfs \
            --aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
            --aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }} \
            --client_version ${{ matrix.client_version }}
      - name: Print lakeFS logs on Spark client failure
        if: ${{ failure() }}
        continue-on-error: true
        working-directory: test/spark
        run: docker compose logs --tail=15000 lakefs

  notify-slack:
    name: Notify Slack on workflow failures
    needs: [compatibility-checks-client, compatibility-checks-server]
    runs-on: ubuntu-22.04
    if: ${{ always() && contains(needs.*.result, 'failure') }}
    steps:
      - name: slack-send
        # Version tag assumed (v1-style webhook configuration via SLACK_WEBHOOK_URL).
        uses: slackapi/slack-github-action@v1
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
        with:
          payload: |
            {
              "mrkdwn": true,
              "text": "Compatibility test failure on master branch: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
            }