# NOTE(review): the lines below were GitHub web-UI page chrome captured with
# the file; preserved here as comments so the document stays valid YAML.
# PR context: "Fix SeekGE in DDB (#6561)" (#260)
name: lakeFS HadoopFS Compatibility Tests
# Runs on every push to master; can also be triggered manually.
on:
  push:
    branches:
      - master
  workflow_dispatch: {}
# These permissions are needed to interact with GitHub's OIDC Token endpoint.
permissions:
  id-token: write
  contents: read
jobs:
  # Generates API code and builds the hadoopfs thick-client jar; both are
  # stored as artifacts for the downstream jobs.
  gen-code:
    name: Generate code from latest lakeFS app
    runs-on: ubuntu-20.04
    steps:
      - name: Check-out code
        uses: actions/checkout@v3
      - name: Setup Go
        uses: actions/setup-go@v3
        with:
          go-version: "1.20.6"
        id: go
      - uses: actions/setup-node@v3
        with:
          node-version: "18.17.0"
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      # NOTE(review): this job defines no step with id "version", so the
      # steps.version.outputs.tag expression below expands to an empty
      # string — confirm whether VERSION is intentionally unset for gen-api.
      - name: Generate code
        run: |
          make -j3 gen-api VERSION=${{ steps.version.outputs.tag }}
          mkdir webui/dist
          touch webui/dist/index.html
          tar -cf /tmp/generated.tar.gz .
      - name: Store generated code
        uses: actions/upload-artifact@v3
        with:
          name: generated-code
          path: /tmp/generated.tar.gz
      - name: Build Spark direct-access client
        working-directory: clients/hadoopfs
        run: |
          mvn -Passembly -DfinalName=client -DskipTests --batch-mode --update-snapshots package
      - name: Store client assembly
        uses: actions/upload-artifact@v3
        with:
          name: client-assembly
          path: clients/hadoopfs/target/client.jar
deploy-image:
name: Build and cache Docker image
needs: [gen-code]
runs-on: ubuntu-20.04
outputs:
tag: ${{ steps.version.outputs.tag }}
image_id: ${{ steps.build_export.outputs.ImageID }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: "18.17.0"
- name: Setup Go
uses: actions/setup-go@v3
with:
go-version: "1.20.6"
id: go
- name: Retrieve generated code
uses: actions/download-artifact@v3
with:
name: generated-code
path: /tmp/
- name: Unpack generated code
run: tar -xf /tmp/generated.tar.gz
- name: Extract version
shell: bash
run: echo "tag=sha-$(git rev-parse --short HEAD | sed s/^v//g)" >> $GITHUB_OUTPUT
id: version
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build and export
uses: docker/build-push-action@v3
id: build_export
with:
context: .
file: ./Dockerfile
tags: treeverse/lakefs:${{ steps.version.outputs.tag }}
outputs: type=docker,dest=/tmp/lakefs.tar
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: lakefs-image
path: /tmp/lakefs.tar
compatibility-checks-client:
name: Test lakeFS Hadoop FileSystem compatibility
needs: gen-code
strategy:
fail-fast: false
matrix:
# Removing a version from this list means the published client is no longer compatible with
# that lakeFS version.
lakefs_version: [ 0.89.1, 0.90.0, 0.91.0, 0.92.0, 0.93.0, 0.94.1, 0.95.0, 0.96.0, 0.96.1, 0.97.4, 0.97.5, 0.98.0, 0.99.0, 0.100.0, 0.101.0, 0.102.0, 0.102.1, 0.102.2, 0.103.0, 0.104.0, 0.105.0, 0.106.2, 0.107.0, 0.107.1, 0.108.0 ]
runs-on: ubuntu-20.04
env:
TAG: ${{ matrix.lakefs_version }}
REPO: treeverse
SPARK_TAG: 3
steps:
- name: Check-out code
uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: "adopt-hotspot"
java-version: "8"
cache: "sbt"
- name: Package Spark App
working-directory: test/spark/app
run: sbt sonnets-311/package
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: 3.11
cache: 'pip'
- run: pip install -r ./test/spark/requirements.txt
- name: Generate uniquifying value
id: unique
run: echo "value=$RANDOM" >> $GITHUB_OUTPUT
- name: Start lakeFS for Spark tests
uses: ./.github/actions/bootstrap-test-lakefs
with:
compose-directory: test/spark
env:
REPO: treeverse
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
LAKEFS_DATABASE_TYPE: postgres
LAKEFS_BLOCKSTORE_TYPE: s3
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
- name: Retrieve client
uses: actions/download-artifact@v3
with:
name: client-assembly
path: clients/hadoopfs/target
- name: Test lakeFS S3 with Spark 3.x thick client
timeout-minutes: 8
working-directory: test/spark
run: |
python ./run-test.py \
--storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
--repository thick-client-test \
--sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
--access_mode hadoopfs \
--aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
--aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
- name: lakeFS Logs on Spark with client failure
if: ${{ failure() }}
continue-on-error: true
working-directory: test/spark
run: docker-compose logs --tail=15000 lakefs
compatibility-checks-server:
name: Test lakeFS server FileSystem compatibility
needs: [gen-code, deploy-image]
strategy:
fail-fast: false
matrix:
# Removing a version from this list means the current lakeFS is no longer compatible with
# that Hadoop lakeFS client version.
client_version: [ 0.1.10, 0.1.11, 0.1.12, 0.1.13, 0.1.14, 0.1.15 ]
runs-on: ubuntu-20.04
env:
CLIENT_VERSION: ${{ matrix.client_version }}
TAG: ${{ needs.deploy-image.outputs.tag }}
IMAGE_ID: ${{ needs.deploy-image.outputs.image_id }}
REPO: treeverse
SPARK_TAG: 3
steps:
- name: Check-out code
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Download artifact
uses: actions/download-artifact@v3
with:
name: lakefs-image
path: /tmp
- name: Load Docker image
run: docker load --input /tmp/lakefs.tar
- uses: actions/setup-java@v3
with:
distribution: "adopt-hotspot"
java-version: "8"
cache: "sbt"
- name: Package Spark App
working-directory: test/spark/app
run: sbt sonnets-311/package
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: 3.11
cache: 'pip'
- run: pip install -r ./test/spark/requirements.txt
- name: Generate uniquifying value
id: unique
run: echo "value=$RANDOM" >> $GITHUB_OUTPUT
- name: Start lakeFS for Spark tests
uses: ./.github/actions/bootstrap-test-lakefs
with:
compose-directory: test/spark
env:
REPO: treeverse
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
LAKEFS_DATABASE_TYPE: postgres
LAKEFS_BLOCKSTORE_TYPE: s3
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
- name: Test lakeFS S3 with Spark 3.x thick client
timeout-minutes: 8
working-directory: test/spark
run: |
python ./run-test.py \
--storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
--repository thick-client-test \
--sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
--access_mode hadoopfs \
--aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
--aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }} \
--client_version ${{ matrix.client_version }}
- name: lakeFS Logs on Spark with client failure
if: ${{ failure() }}
continue-on-error: true
working-directory: test/spark
run: docker-compose logs --tail=15000 lakefs
notify-slack:
name: Notify slack on workflow failures
needs: [compatibility-checks-client, compatibility-checks-server]
runs-on: ubuntu-20.04
if: ${{ always() && contains(needs.*.result, 'failure') }}
steps:
- name: slack-send
uses: slackapi/[email protected]
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
with:
payload: |
{
"mrkdwn": true,
"text": "Compatibility tests failure in master branch: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
}