Merge pull request #1788 from ByConity/cp-0730-1 #39

Workflow file for this run

name: CI
on:
  # Triggers the workflow on push or pull request events, but only for the "master" and "release/0.4.x" branches
  push:
    branches: [ "master", "release/0.4.x" ]
  pull_request:
    branches: [ "master", "release/0.4.x" ]
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
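# The concurrency group keys on the pull request number (or the ref for pushes), so a newer
# run for the same PR or branch cancels any run that is still in progress.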
jobs:
  scm_build:
    name: Build binary and run stateless tests
    # Use a digest instead of a tag to avoid network issues; original image byconity/byconity-ci:24Jan16
    container: byconity/byconity-ci:24Jan16
    runs-on: self-hosted
    steps:
      - name: Preload docker images
        run: |
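          # Load every pre-fetched image tarball from /docker_image_archive: `xargs -L 1` runs
          # `docker image load -i <file>` once per tarball, and --no-run-if-empty skips the call
          # when no .tar files are present.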
          cd /docker_image_archive && ls -1 *.tar | xargs --no-run-if-empty -L 1 docker image load -i
      - name: Set environment variables and git
        run: |
          export COMPOSE_PROJECT_NAME=byconity-$(cat /etc/hostname)
          echo "COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME}" >> $GITHUB_ENV
echo "BINARY_VOL=${COMPOSE_PROJECT_NAME}_bin_volume" >> $GITHUB_ENV
echo "HDFS_VOL=${COMPOSE_PROJECT_NAME}_hdfs_volume" >> $GITHUB_ENV
echo "CONFIG_VOL=${COMPOSE_PROJECT_NAME}_config_volume" >> $GITHUB_ENV
echo "CONFIG_VOL_FOR_S3=${COMPOSE_PROJECT_NAME}_config_volume_for_s3" >> $GITHUB_ENV
echo "SCRIPTS_VOL=${COMPOSE_PROJECT_NAME}_scripts_volume" >> $GITHUB_ENV
echo "CLICKHOUSE_PORT_TCP=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "CLICKHOUSE_PORT_HTTP=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "CLICKHOUSE_PORT_HDFS=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "CNCH_WRITE_WORKER_PORT_TCP=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "FS_PORT=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "HIVE_SERVER_PORT=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "HMS_PORT=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
echo "HIVE_NAMENODE_PORT=$(python -c 'import socket; s=socket.socket(); s.bind(("", 0)); print(s.getsockname()[1]); s.close()')" >> $GITHUB_ENV
git config --global --add safe.directory $GITHUB_WORKSPACE
      - name: Setup git proxy
        if: ${{ runner.name != 'ec2-aws-id4-10.10.129.157' }}
        run: |
          git config --global http.https://github.com.proxy http://${{ secrets.VIP_PROXY }}:3128
      - name: Check out repository code
        uses: actions/checkout@v4
        with:
          submodules: recursive
          path: '.'
      - name: Build binary with build_bin.sh
        env:
          CUSTOM_CMAKE_BUILD_TYPE: "Release"
        run: |
          # git -C "$GITHUB_WORKSPACE" submodule sync
          # git -C "$GITHUB_WORKSPACE" submodule update --init --recursive --force
          ./build_bin.sh
      - name: Run unit tests
        run: |
          mkdir -p /test_output
          bash $GITHUB_WORKSPACE/unittest.sh
      - name: glibc_compatibility_check
        run: |
          python3 $GITHUB_WORKSPACE/tests/ci/compatibility_check.py --binary $GITHUB_WORKSPACE/build/programs/clickhouse
      - name: Preparing
        run: |
          mkdir /CI/
          cd /CI/
          cp -r $GITHUB_WORKSPACE/docker/ci-deploy/* ./
          cp $GITHUB_WORKSPACE/docker/ci-deploy/.env ./
          mkdir bin
          cp -r $GITHUB_WORKSPACE/build/programs/* ./bin
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" hdfs_config/conf/core-site.xml
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config/fdb.cluster
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config/cnch-config.yml
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config/daemon-manager.yml
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config_for_s3_storage/fdb.cluster
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config_for_s3_storage/cnch-config.yml
          sed -i "s|COMPOSE\_PROJECT\_NAME|${COMPOSE_PROJECT_NAME}|g" config_for_s3_storage/daemon-manager.yml
echo "Create volumes for ByConity Cluster"
/bin/bash create_docker_volume_ci.sh
- name: Create ByConity Cluster S3
run: |
echo "Bring up ByConity Cluster S3"
cd /CI/
docker compose -f docker-compose-s3.yml up -d
sleep 20
export CLICKHOUSE_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.Gateway}}{{end}}' ${COMPOSE_PROJECT_NAME}_server-s3-0)
echo "CLICKHOUSE_HOST=${CLICKHOUSE_HOST}" >> $GITHUB_ENV
export CNCH_WRITE_WORKER_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.Gateway}}{{end}}' ${COMPOSE_PROJECT_NAME}_worker-write-s3-0)
echo "CNCH_WRITE_WORKER_HOST=${CNCH_WRITE_WORKER_HOST}" >> $GITHUB_ENV
echo "Check connection to ByConity Server at ${CLICKHOUSE_HOST}:${CLICKHOUSE_PORT_HTTP}"
curl --retry 11 --retry-delay 5 --retry-connrefused --retry-max-time 120 --max-time 120 $CLICKHOUSE_HOST:$CLICKHOUSE_PORT_HTTP
echo "Check status of cluster"
docker ps --filter name=$COMPOSE_PROJECT_NAME
- name: Run ByConity FuntionalStateless for S3 (w.o optimizer)
continue-on-error: true
run: |
mkdir -p ci_stateless_without_optimizer_s3
cd ci_stateless_without_optimizer_s3
export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=0'
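          # EXTRA_OPTIONS is forwarded through clickhouse-test's --client-option flag below,
          # presumably as client-side settings, so this pass runs with the query optimizer
          # disabled and optimizer fallback turned off.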
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          # We need the skip-list to skip some tests.
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          mkdir -p test_output
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
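          # TENANT_DB_PREFIX and its variants provide the "1234." database prefix in several
          # escapings (plain, backtick-quoted, shell-escaped backtick, URL-encoded %60); the plain
          # form is baked into the .sql and .reference files by the sed calls below, while the
          # other forms are presumably read by the shell-based tests at run time.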
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
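          # The test run below pipes combined stdout/stderr through `ts` (from moreutils) to prefix
          # every line with a timestamp, and `tee -a` both echoes the log and appends it to
          # test_output/test_result.txt for the result parser invoked right after.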
          ./clickhouse-test --s3 --use-skip-list --tenant_id $TENANT_ID -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=$TENANT_ID 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_without_optimizer_s3_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          cd ..
          rm -rf ci_stateless_without_optimizer_s3
      - name: Run ByConity FunctionalStateless for S3 no tenant (w/o optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_stateless_without_optimizer_s3_no_tenant
          cd ci_stateless_without_optimizer_s3_no_tenant
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=0'
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir -p queries_no_tenant
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless_no_tenant queries_no_tenant/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries_no_tenant/
          # We need the skip-list to skip some tests.
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries_no_tenant/
          mkdir -p test_output
          ./clickhouse-test --s3 --use-skip-list --queries "queries_no_tenant" -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_without_optimizer_s3_no_tenant_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/wo_optimizer_s3_no_tenant_test_result.txt
          cd ..
          rm -rf ci_stateless_without_optimizer_s3_no_tenant
      - name: After S3 tests
        if: always()
        continue-on-error: true
        run: |
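          # Collect logs: list this project's containers that run the byconity/debian runtime
          # image, then for each one create /test_output/<container-name> and copy
          # /var/log/byconity out of the container so the logs end up in the uploaded artifacts.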
          docker ps -a --format "{{.ID}} {{.Names}} {{.Image}}" --filter name=$COMPOSE_PROJECT_NAME | grep "byconity/debian:buster-runit-fdb7.1.27" | awk '{print $1"\t"$2}' | xargs -n 2 sh -c 'mkdir -p /test_output/$2; docker cp -q $1:/var/log/byconity /test_output/$2/log' sh || true
      - name: Stop ByConity Cluster S3
        continue-on-error: true
        run: |
          cd /CI/
          echo "Bring down ByConity Cluster S3"
          docker compose -f docker-compose-s3.yml down
          docker ps --filter name=$COMPOSE_PROJECT_NAME
          docker volume prune -f
      - name: Create ByConity Cluster HDFS
        run: |
          echo "Bring up ByConity Cluster HDFS"
          cd /CI/
          docker compose up -d
          sleep 10
          hdfs_config/script/create_users.sh
          export CLICKHOUSE_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.Gateway}}{{end}}' ${COMPOSE_PROJECT_NAME}_server-0)
          echo "CLICKHOUSE_HOST=${CLICKHOUSE_HOST}" >> $GITHUB_ENV
          export CNCH_WRITE_WORKER_HOST=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.Gateway}}{{end}}' ${COMPOSE_PROJECT_NAME}_worker-write-0)
          echo "CNCH_WRITE_WORKER_HOST=${CNCH_WRITE_WORKER_HOST}" >> $GITHUB_ENV
          echo "Check connection to ByConity Server at ${CLICKHOUSE_HOST}:${CLICKHOUSE_PORT_HTTP}"
          curl --retry 10 --retry-delay 5 --retry-connrefused --retry-max-time 120 --max-time 120 $CLICKHOUSE_HOST:$CLICKHOUSE_PORT_HTTP
          echo "Check status of cluster"
          docker ps --filter name=$COMPOSE_PROJECT_NAME
      - name: Run ByConity External Table test
        continue-on-error: true
        run: |
          mkdir -p ci_external_table
          cd ci_external_table
          export EXTRA_OPTIONS='enable_optimizer_fallback=0'
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_external_table queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          mkdir -p test_output
          ./clickhouse-test --tenant_id $TENANT_ID -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=${TENANT_ID} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/external_table_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/external_table_test_result.txt
          cd ..
          rm -rf ci_external_table
      - name: Run ByConity FunctionalStateless (w/o optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_stateless_without_optimizer
          cd ci_stateless_without_optimizer
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=0'
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          mkdir -p test_output
          ./clickhouse-test --use-skip-list --tenant_id ${TENANT_ID} -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=${TENANT_ID} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_without_optimizer_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/wo_optimizer_test_result.txt
          cd ..
          rm -rf ci_stateless_without_optimizer
      - name: Run ByConity FunctionalStateless no tenant (w/o optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_stateless_without_optimizer_no_tenant
          cd ci_stateless_without_optimizer_no_tenant
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=0'
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless_no_tenant queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          mkdir -p test_output
          ./clickhouse-test --use-skip-list -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_without_optimizer_no_tenant_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/wo_optimizer_no_tenant_test_result.txt
          cd ..
          rm -rf ci_stateless_without_optimizer_no_tenant
      - name: Run ByConity FunctionalStateless (w/ optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_stateless_with_optimizer
          cd ci_stateless_with_optimizer
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          mkdir -p test_output
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=1'
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          ./clickhouse-test --use-skip-list --tenant_id ${TENANT_ID} -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=${TENANT_ID} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_with_optimizer_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/w_optimizer_test_result.txt
          cd ..
          rm -rf ci_stateless_with_optimizer
      - name: Run ByConity FunctionalStateless (w/ optimizer + bsp_mode)
        continue-on-error: true
        run: |
          mkdir -p ci_stateless_with_optimizer_bsp
          cd ci_stateless_with_optimizer_bsp
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          mkdir -p test_output
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=1 bsp_mode=1'
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          ./clickhouse-test --use-skip-list -b $GITHUB_WORKSPACE/build/programs/clickhouse --tenant_id ${TENANT_ID} --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=${TENANT_ID} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_with_optimizer_bsp_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/w_optimizer_bsp_test_result.txt
          cd ..
          rm -rf ci_stateless_with_optimizer_bsp
      - name: Run ByConity FunctionalStateless no tenant (w/ optimizer)
        continue-on-error: true
        run: |
          printf '%s\n' "$CNCH_WRITE_WORKER_HOST"
          mkdir -p ci_stateless_with_optimizer_no_tenant
          cd ci_stateless_with_optimizer_no_tenant
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless_no_tenant queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          mkdir -p test_output
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=1'
          ./clickhouse-test --use-skip-list -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_no_tenant_with_optimizer_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/w_optimizer_test_result.txt
          cd ..
          rm -rf ci_stateless_with_optimizer_no_tenant
      - name: Run ByConity FunctionalStateless no tenant (w/ optimizer bsp)
        continue-on-error: true
        run: |
          printf '%s\n' "$CNCH_WRITE_WORKER_HOST"
          mkdir -p ci_stateless_with_optimizer_bsp_no_tenant
          cd ci_stateless_with_optimizer_bsp_no_tenant
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/4_cnch_stateless_no_tenant queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          echo "Skip 48028_explain_format_json and 48036_verify_nullable_type_after_eliminate_join_by_fk_in_cte"
          rm -f ./queries/4_cnch_stateless_no_tenant/48028_explain_format_json.* ./queries/4_cnch_stateless_no_tenant/48036_verify_nullable_type_after_eliminate_join_by_fk_in_cte.*
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          mkdir -p test_output
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=1 bsp_mode=1'
          ./clickhouse-test --use-skip-list -b $GITHUB_WORKSPACE/build/programs/clickhouse --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/stateless_no_tenant_with_optimizer_bsp_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/no_tenant_w_optimizer_bsp_test_result.txt
          cd ..
          rm -rf ci_stateless_with_optimizer_bsp_no_tenant
      - name: Run ByConity ClickhouseSQL FunctionalStateless (w/o optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_clickhouse_sql
          cd ci_clickhouse_sql
          export EXTRA_OPTIONS='enable_optimizer_fallback=0 enable_optimizer=0'
          export TENANT_ID="1234"
          export TENANT_DB_PREFIX="${TENANT_ID}."
          export TENANT_DEFAULT_DB_PREFIX="${TENANT_ID}\`"
          export TENANT_DEFAULT_DB_PREFIX_INQUOTE="${TENANT_ID}\\\`"
          export TENANT_HTTP_DEFAULT_DB_PREFIX="${TENANT_ID}%60"
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/process_functional_tests_result.py ./
          cp -r $GITHUB_WORKSPACE/tests/clickhouse-test ./
          mkdir queries
          cp -r $GITHUB_WORKSPACE/tests/queries/7_clickhouse_sql queries/
          cp -r $GITHUB_WORKSPACE/tests/queries/shell_config.sh queries/
          cp $GITHUB_WORKSPACE/tests/queries/skip_list.json queries/
          find ./queries -name '*.sql' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          find ./queries -name '*.reference' | xargs -I {} sed -i "s/\${TENANT_DB_PREFIX}/$TENANT_DB_PREFIX/" {}
          echo "Running test without optimizer"
          mkdir -p test_output
          ./clickhouse-test --use-skip-list -b $GITHUB_WORKSPACE/build/programs/clickhouse --tenant_id ${TENANT_ID} --stop --hung-check --jobs 8 --order asc --print-time --single-server --client-option ${EXTRA_OPTIONS} tenant_id=${TENANT_ID} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/clickhouse_sql_test_results.txt --out-status-file /test_output/check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
          mv test_output/test_result.txt /test_output/ChSQL_tenant_wo_optimizer_test_result.txt
          cd ..
          rm -rf ci_clickhouse_sql
      - name: After test
        if: always()
        continue-on-error: true
        run: |
          docker ps -a --format "{{.ID}} {{.Names}} {{.Image}}" --filter name=$COMPOSE_PROJECT_NAME | grep "byconity/debian:buster-runit-fdb7.1.27" | awk '{print $1"\t"$2}' | xargs -n 2 sh -c 'mkdir -p /test_output/$2; docker cp -q $1:/var/log/byconity /test_output/$2/log' sh || true
          mv /test_output /Artifacts || true
      - name: Upload Artifact
        if: always()
        continue-on-error: true
        uses: actions/upload-artifact@v3
        with:
          name: Artifacts
          path: /Artifacts
      - name: Analyse Result
        run:
          /bin/bash $GITHUB_WORKSPACE/docker/test/result.sh
      - name: Cleanup docker compose HDFS and S3
        if: always()
        run: |
          cd /CI/
          docker compose down --volumes || true
          docker compose -f docker-compose-s3.yml down --volumes || true
          docker container prune -f
          docker network prune -f
          docker volume prune -f