name: lakeFS HadoopFS Compatibility Tests
on:
  push:
    branches:
      - master
  workflow_dispatch: {}
# These permissions are needed to interact with GitHub's OIDC Token endpoint.
permissions:
  id-token: write
  contents: read
jobs:
  gen-code:
    name: Generate code from latest lakeFS app
    runs-on: ubuntu-20.04
    steps:
      - name: Check-out code
        uses: actions/checkout@v4
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.4"
        id: go
      - uses: actions/setup-node@v4
        with:
          node-version: "18.17.0"
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      # Provides steps.version.outputs.tag for the VERSION passed to make below.
      - name: Extract version
        shell: bash
        run: echo "tag=sha-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
        id: version
      - name: Generate code
        run: |
          make -j3 gen-api VERSION=${{ steps.version.outputs.tag }}
          mkdir webui/dist
          touch webui/dist/index.html
          tar -czf /tmp/generated.tar.gz .
      - name: Store generated code
        uses: actions/upload-artifact@v3
        with:
          name: generated-code
          path: /tmp/generated.tar.gz
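      # The tarball captures the entire working tree (generated API code plus
      # the stub webui/dist), so downstream jobs can restore an identical tree
      # with one download-and-untar instead of regenerating everything.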
      - name: Build Spark direct-access client
        working-directory: clients/hadoopfs
        run: |
          mvn -Passembly -DfinalName=client -DskipTests --batch-mode --update-snapshots package
      - name: Store client assembly
        uses: actions/upload-artifact@v3
        with:
          name: client-assembly
          path: clients/hadoopfs/target/client.jar
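      # A minimal sketch for reproducing this client build locally, assuming a
      # Java 8 + Maven toolchain like the setup-java step above provides:
      #
      #   cd clients/hadoopfs
      #   mvn -Passembly -DfinalName=client -DskipTests --batch-mode --update-snapshots package
      #   ls target/client.jar   # the assembly uploaded as "client-assembly"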
  deploy-image:
    name: Build and cache Docker image
    needs: [gen-code]
    runs-on: ubuntu-20.04
    outputs:
      tag: ${{ steps.version.outputs.tag }}
      image_id: ${{ steps.build_export.outputs.imageid }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup NodeJS
        uses: actions/setup-node@v4
        with:
          node-version: "18.17.0"
      - name: Setup Go
        uses: actions/setup-go@v4
        with:
          go-version: "1.21.4"
        id: go
      - name: Retrieve generated code
        uses: actions/download-artifact@v3
        with:
          name: generated-code
          path: /tmp/
      - name: Unpack generated code
        run: tar -xf /tmp/generated.tar.gz
      - name: Extract version
        shell: bash
        run: echo "tag=sha-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
        id: version
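      # Yields a tag of the form "sha-<short hash>", e.g. "sha-1a2b3c4"
      # (example hash), which names the image built in the next step.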
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and export
        uses: docker/build-push-action@v5
        id: build_export
        with:
          context: .
          file: ./Dockerfile
          tags: treeverse/lakefs:${{ steps.version.outputs.tag }}
          outputs: type=docker,dest=/tmp/lakefs.tar
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: lakefs-image
          path: /tmp/lakefs.tar
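  # Note this job never pushes to a registry: "type=docker,dest=..." exports
  # the image to a tarball, which compatibility-checks-server restores via
  # `docker load`. A local sketch of the same round-trip (hypothetical tag):
  #
  #   docker buildx build -t treeverse/lakefs:sha-1a2b3c4 --output type=docker,dest=/tmp/lakefs.tar .
  #   docker load --input /tmp/lakefs.tar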
  compatibility-checks-client:
    name: Test lakeFS Hadoop FileSystem compatibility
    needs: gen-code
    strategy:
      fail-fast: false
      matrix:
        # Removing a version from this list means the published client is no longer compatible with
        # that lakeFS version.
        lakefs_version: [ 0.108.0, 0.109.0, 0.110.0, 0.111.0, 0.111.1, 0.112.1, 0.113.0, 1.0.0, 1.1.0, 1.2.0, 1.3.0 ]
    runs-on: ubuntu-20.04
    env:
      TAG: ${{ matrix.lakefs_version }}
      REPO: treeverse
      SPARK_TAG: 3
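    # TAG picks the published lakeFS release image to run against and
    # SPARK_TAG a Spark 3.x image; both are presumably consumed by the
    # docker-compose setup under test/spark (the compose-directory used below).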
    steps:
      - name: Check-out code
        uses: actions/checkout@v4
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      - name: Package Spark App
        working-directory: test/spark/app
        run: sbt sonnets-311/package
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'
      - run: pip install -r ./test/spark/requirements.txt
      - name: Generate uniquifying value
        id: unique
        shell: bash
        run: echo "value=$(cat /proc/sys/kernel/random/uuid)" >> $GITHUB_OUTPUT
      - name: Start lakeFS for Spark tests
        uses: ./.github/actions/bootstrap-test-lakefs
        with:
          compose-directory: test/spark
        env:
          REPO: treeverse
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          LAKEFS_DATABASE_TYPE: postgres
          LAKEFS_BLOCKSTORE_TYPE: s3
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
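      # Two credential pairs are in play here: AWS_ACCESS_KEY_ID/_SECRET for
      # the test harness itself, and the ESTI_* pair that lakeFS uses as its
      # S3 blockstore credentials (the same pair run-test.py receives below).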
      - name: Retrieve client
        uses: actions/download-artifact@v3
        with:
          name: client-assembly
          path: clients/hadoopfs/target
      - name: Test lakeFS S3 with Spark 3.x thick client
        timeout-minutes: 8
        working-directory: test/spark
        run: |
          python ./run-test.py \
            --storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
            --repository thick-client-test \
            --sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
            --access_mode hadoopfs \
            --aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
            --aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
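      # --access_mode hadoopfs exercises the "thick" path: the lakeFS Hadoop
      # FileSystem in the Spark job reads and writes object data directly
      # against S3, with the lakeFS server involved only for metadata.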
      - name: lakeFS Logs on Spark with client failure
        if: ${{ failure() }}
        continue-on-error: true
        working-directory: test/spark
        run: docker-compose logs --tail=15000 lakefs
  compatibility-checks-server:
    name: Test lakeFS server FileSystem compatibility
    needs: [gen-code, deploy-image]
    strategy:
      fail-fast: false
      matrix:
        # Removing a version from this list means the current lakeFS is no longer compatible with
        # that Hadoop lakeFS client version.
        client_version: [ 0.1.10, 0.1.11, 0.1.12, 0.1.13, 0.1.14, 0.1.15, 0.2.1 ]
    runs-on: ubuntu-20.04
    env:
      CLIENT_VERSION: ${{ matrix.client_version }}
      TAG: ${{ needs.deploy-image.outputs.tag }}
      IMAGE_ID: ${{ needs.deploy-image.outputs.image_id }}
      REPO: treeverse
      SPARK_TAG: 3
    steps:
      - name: Check-out code
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Download artifact
        uses: actions/download-artifact@v3
        with:
          name: lakefs-image
          path: /tmp
      - name: Load Docker image
        run: docker load --input /tmp/lakefs.tar
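      # Makes treeverse/lakefs:${TAG}, built and exported by deploy-image,
      # available to the local Docker daemon so the compose setup below runs
      # this exact build rather than pulling a published release.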
      - uses: actions/setup-java@v3
        with:
          distribution: "adopt-hotspot"
          java-version: "8"
          cache: "sbt"
      - name: Package Spark App
        working-directory: test/spark/app
        run: sbt sonnets-311/package
      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: 3.11
          cache: 'pip'
      - run: pip install -r ./test/spark/requirements.txt
      - name: Generate uniquifying value
        id: unique
        shell: bash
        run: echo "value=$(cat /proc/sys/kernel/random/uuid)" >> $GITHUB_OUTPUT
      - name: Start lakeFS for Spark tests
        uses: ./.github/actions/bootstrap-test-lakefs
        with:
          compose-directory: test/spark
        env:
          REPO: treeverse
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          LAKEFS_DATABASE_TYPE: postgres
          LAKEFS_BLOCKSTORE_TYPE: s3
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
      - name: Test lakeFS S3 with Spark 3.x thick client
        timeout-minutes: 8
        working-directory: test/spark
        run: |
          python ./run-test.py \
            --storage_namespace s3://esti-system-testing/compatibility/${{ github.run_number }}-spark3-client/${{ steps.unique.outputs.value }} \
            --repository thick-client-test \
            --sonnet_jar sonnets-311/target/sonnets-311/scala-2.12/sonnets-311_2.12-0.1.0.jar \
            --access_mode hadoopfs \
            --aws_access_key ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }} \
            --aws_secret_key ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }} \
            --client_version ${{ matrix.client_version }}
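      # The inverse of compatibility-checks-client: --client_version pins a
      # previously published Hadoop lakeFS client release, checking that the
      # freshly built server still works with older clients.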
      - name: lakeFS Logs on Spark with client failure
        if: ${{ failure() }}
        continue-on-error: true
        working-directory: test/spark
        run: docker-compose logs --tail=15000 lakefs
  notify-slack:
    name: Notify slack on workflow failures
    needs: [compatibility-checks-client, compatibility-checks-server]
    runs-on: ubuntu-20.04
    if: ${{ always() && contains(needs.*.result, 'failure') }}
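    # needs.*.result collects the result of every job listed in `needs`, and
    # always() keeps this job scheduled even when those jobs fail, which is
    # precisely the case being reported.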
    steps:
      - name: slack-send
        uses: slackapi/[email protected]
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
        with:
          payload: |
            {
              "mrkdwn": true,
              "text": "Compatibility tests failure in master branch: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
            }