diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 271ccddd..9ac94bbd 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -43,9 +43,9 @@ on: required: true ssh_pem_fleet_aws_vm: required: true - proxmox_api_token_id: + proxmox_ve_username: required: true - proxmox_api_token_secret: + proxmox_ve_password: required: true jira_username: required: true @@ -72,9 +72,9 @@ env: JIRA_USERNAME: ${{ secrets.jira_username}} JIRA_PASSWORD: ${{ secrets.jira_password}} # slack channel rd-platform - SLACK_CHANNEL: "C02U028NMB7" + #SLACK_CHANNEL: "C02U028NMB7" # development slack channel - #SLACK_CHANNEL: "C05K2KF1UP8" + SLACK_CHANNEL: "C05K2KF1UP8" jobs: Validate-boostrap-configs: @@ -170,20 +170,9 @@ jobs: path: platform_configs/* retention-days: 5 - Standalone-Validations: - runs-on: ubuntu-20.04 + Fleet-Validations: needs: [Validate-boostrap-configs] - outputs: - slack_thread_id: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - steps: - - name: Pass through - run: echo "Pass" - - Validation-UI-Tests: - needs: [Standalone-Validations] runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} steps: - name: Cleanup Workspace uses: rtCamp/action-cleanup@master @@ -204,86 +193,116 @@ jobs: python3 -m pip install pyopenssl --upgrade python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - name: unstash robot_configs uses: actions/download-artifact@v3 with: name: robot_configs path: . - - name: Patch robot_configs *.ci with the right full path + - name: Provision remote vms (AWS) + if: ${{ false }} shell: bash run: | - find -L . 
-type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; + mkdir aws_artifacts + python3 -m pip install awscli + cd staging + export PATH="$HOME/.local/bin:$PATH" + export product="platform" + export version="$PRODUCT_RELEASE_VERSION" + ./ec2_provision.sh + cp -vf infra_ids.txt ../aws_artifacts/ + + - name: Stash ci_infra_artifacts (AWS) + if: ${{ false }} + uses: actions/upload-artifact@v3 + with: + name: ci_infra_artifacts + path: aws_artifacts/* + retention-days: 5 - - name: Setup QA UI tests - id: ui_tests_setup + - name: Install terraform shell: bash run: | - qa_key=ui_tests + wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg + echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list + sudo apt update && sudo apt install terraform -y - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/jira_report.txt /tmp/test_set.txt + - name: Setup infra environment configs + id: infra_env_configs_setup + shell: bash + run: | + env_configs_dir=infra_env_configs + env_configs_version=0.0.1-2 + env_configs_repo_name=devops-tf-env-conf + rm -rf $env_configs_dir export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.jira_report --output_file /tmp/jira_report.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key 
product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - jira_report=$(cat /tmp/jira_report.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir + integration-pipeline fetch_by_tag --repo $env_configs_repo_name --version $env_configs_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $env_configs_dir + ls -la $env_configs_dir + echo "target_dir=${env_configs_dir}" >> $GITHUB_OUTPUT + + - name: Setup terraform proxmox provisioner + id: provision_infra_setup + shell: bash + run: | + provision_infra_dir=provision_scripts + provision_infra_version=1.0.0-2 + provision_infra_repo_name=devops-tf-proxmox-bpg - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "jira_report=${jira_report}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT + rm -rf $provision_infra_dir + export PATH="$HOME/.local/bin:$PATH" + integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir + ls -la $provision_infra_dir + echo "target_dir=${provision_infra_dir}" >> $GITHUB_OUTPUT - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate + - name: Define Instance names + id: infra_names + shell: bash + run: | + branch=$(echo ${GITHUB_REF#refs/heads/} | sed "s;\.;-;g" ) - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic + local_manager_prefix="ip-$branch-manager" + local_worker_prefix="ip-$branch-worker" + echo "$local_manager_prefix" + echo "$local_worker_prefix" + total_resources=${{ inputs.fleet_number_members }} + ((total_resources+=1)) - popd + echo "manager_prefix=${local_manager_prefix}" >> $GITHUB_OUTPUT + echo "worker_prefix=${local_worker_prefix}" >> $GITHUB_OUTPUT + echo "total_resources=${total_resources}" >> $GITHUB_OUTPUT - - name: Feature File Validation - id: feature_file_ui - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + - name: Provision remote vms (Proxmox) + working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - . 
venv/bin/activate + multiply_node=$(printf '"hel",%.0s' {1..${{ steps.infra_names.outputs.total_resources }}}) + node_list_str=${multiply_node::-1} - xray download ${{ steps.ui_tests_setup.outputs.test_set }} - xray compare ./tests/feature/ + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - deactivate + echo "proxmox_host_list=[$node_list_str]">>input.tfvars + echo "fleet_peer_nr=${{ inputs.fleet_number_members }}">>input.tfvars + echo 'fleet_manager_name="${{ steps.infra_names.outputs.manager_prefix }}"'>>input.tfvars + echo 'fleet_password="n/a"'>>input.tfvars + echo 'fleet_peer_name_prefix="${{ steps.infra_names.outputs.worker_prefix }}"'>>input.tfvars + echo 'ip_list=${{ inputs.fleet_ips }}'>>input.tfvars + echo 'proxmox_ve_username="${{ secrets.proxmox_ve_username }}"'>>input.tfvars + echo 'proxmox_ve_password="${{ secrets.proxmox_ve_password }}"'>>input.tfvars + echo "\n">>input - - name: Prepare QA Feature File Validation slack message + echo "File args: $var_file_arg" + echo "Input File args: $(cat input.tfvars)" + terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" + terraform apply -auto-approve $var_file_arg -var-file=input.tfvars + terraform refresh $var_file_arg -var-file=input.tfvars + + - name: Prepare Devops provisioning slack message if: always() - id: pre_slack + id: pre_slack_infra run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is unstable :rain_cloud: \ - ${{ github.job }} feature file validation: ${{ steps.feature_file_ui.outcome }} \ + MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is being impacted by an infrastructural issue. \ + Provisioning of fleet infrastructure failed. Please take a look! 
\ Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT @@ -291,96 +310,163 @@ jobs: if: failure() uses: slackapi/slack-github-action@v1.23.0 with: - channel-id: "C02PB9A9F45" - slack-message: ${{ steps.pre_slack.outputs.msg_error }} + channel-id: "G0102LEV1CL" + slack-message: ${{ steps.pre_slack_infra.outputs.msg_error }} env: SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - name: Install - id: install + - name: Apply ansible inventory shell: bash run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true + cp ${{ steps.provision_infra_setup.outputs.target_dir }}/provisioned_inventory.yml staging/provisioned_inventory.yml + cat staging/provisioned_inventory.yml + export PATH="$HOME/.local/bin:$PATH" + integration-pipeline get_yml_value --file staging/provisioned_inventory.yml --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt - rm -rf artifacts - mkdir -p artifacts - cp *.json artifacts/ + - name: Setup ansible installation + id: ansible_install_setup + shell: bash + run: | + install_key=ansible_deploy - CONFIG_FILE_NAME="basic-standalone-noetic.json" + rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.target_dir --output_file /tmp/target_dir.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.version --output_file /tmp/version.txt + integration-pipeline get_yml_value --file 
product-manifest.yaml --key product_components.installion.$install_key.name --output_file /tmp/repo_name.txt + install_infra_dir=$(cat /tmp/target_dir.txt) + install_infra_version=$(cat /tmp/version.txt) + install_infra_repo_name=$(cat /tmp/repo_name.txt) + + rm -rf $install_infra_repo_name + integration-pipeline fetch_by_tag --repo $install_infra_repo_name --version $install_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $install_infra_dir + ls -la $install_infra_dir + echo "target_dir=${install_infra_dir}" >> $GITHUB_OUTPUT + + - name: Ansible install platform + id: ansible_install_platform + working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} + shell: bash + run: | + + echo "${{ secrets.ssh_pem_fleet_aws_vm }}" > ~/.ssh/aws_slave.pem + sudo chmod 600 ~/.ssh/aws_slave.pem + while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ; do echo Waiting for other software managers to finish... ; sleep 5;done + sudo apt install -y python3.9 python3.9-venv + python3.9 -m venv ansible-venv + source ansible-venv/bin/activate + python3 -m pip install -r requirements.txt + ansible-galaxy install -r requirements.yml --timeout 120 + + stripped_ips=$(echo ${{ inputs.fleet_ips }} | sed "s;\[;;g" | sed "s;];;g" | sed "s; ;;g") + touch ~/.ssh/known_hosts + sudo chmod 600 ~/.ssh/known_hosts + IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips + + manager_ip=${stripped_ips_arr[0]} + echo $manager_ip + echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME + for ip in "${stripped_ips_arr[@]}" + do + if [[ $ip == *"/"* ]]; then + ip=${ip%/*} + fi + ssh-keygen -f ~/.ssh/known_hosts -R $ip + ssh-keyscan -H $ip >> ~/.ssh/known_hosts + #ssh devops@${ip} -i 
~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' + # set -e + # cloud-init status --wait + #' + done - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" + # Ensure cloud init is done on all the hosts + for fleet_host in "manager" "member0" "member1"; do + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'cloud-init status --wait' done - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT + # Start the installation + ansible-playbook install.yml \ + -i ../staging/provisioned_inventory.yml \ + --key-file ~/.ssh/aws_slave.pem \ + --extra-vars=@"$(pwd)/.."/product-manifest.yaml \ + -e fleet_domain_dns="" \ + -e "{\"proxycerts__remote_redis_servers_fqn\": [$(cat ../staging/manager_private_ip.txt)]}" \ + -e '{"fleet_extra_hosts": ["172.22.0.106 registry.hel.mov.ai traefik"]}' \ + --skip-tags "validate,ufw,hardening" + execution_status=$? 
+ deactivate + exit $execution_status - - name: Install dependencies in spawner - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + - name: Setup QA API tests + id: api_tests_setup shell: bash run: | - # install test dependencies on spawner - if [ -f apt-requirements.txt ]; then - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" " ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - fi - - - name: UI tests - timeout-minutes: 120 - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + qa_key=api_tests + + rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt + export PATH="$HOME/.local/bin:$PATH" + + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt + + tests_dir=$(cat /tmp/target_dir.txt) + tests_version=$(cat /tmp/version.txt) + tests_repo_name=$(cat /tmp/repo_name.txt) + + rm -rf $tests_repo_name + integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir + ls -la $tests_dir + + echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT + + # setup venv in a step that is always executed + pushd "${tests_dir}" + rm -rf venv + python3 -m venv venv + . 
venv/bin/activate + python3 -m pip install pip --upgrade + python3 -m pip install pyopenssl --upgrade + pip install -r requirements.txt + deactivate + popd + + - name: API tests + timeout-minutes: 30 + working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} shell: bash run: | + # install test dependencies on host + sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" . venv/bin/activate - pytest \ - -ra \ - --hub_url http://selenoid-ui.hel.mov.ai \ - --base_url https://${{ steps.agent_info.outputs.ip }}/ \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - --cucumberjson=./results.json + pytest src \ + --movai-ip ${{ steps.ansible_install_platform.outputs.manager_ip }} \ + --movai-user admin \ + --movai-pw admin@123 \ + -m fleet deactivate - - name: Create Xray test execution + - name: Save docker container logs if: always() - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} + shell: bash run: | - export PATH="$HOME/.local/bin:$PATH" + # for sanity + docker ps -a + + for container in backend spawner messager-server; do + CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^${container}-.*") + docker logs "${CONTAINER_ID}" &> "${container}.log" || true + done || true - # get platform version - rm -f /tmp/version.txt - integration-pipeline get_yml_value --file ../product-manifest.yaml --key version --output_file /tmp/version.txt - plat_version=$(cat /tmp/version.txt) + # movai-service + journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - # create test execution - if [ "${{ steps.ui_tests_setup.outputs.jira_report }}" == "True" ] ; then - . 
venv/bin/activate - xray create ./results.json --version "${plat_version}" --label UI_Automation - deactivate - fi - name: Get current job id if: always() @@ -400,7 +486,7 @@ jobs: if: always() id: pre_slack_result run: | - MESSAGE=":white_check_mark:${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" + MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT @@ -412,7 +498,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Slack message failure uses: archive/github-actions-slack@master @@ -422,1962 +508,53 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - name: Save docker container logs + - name: Collect Fleet QA artifacts + working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} if: always() - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} shell: bash + env: + API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a 
--format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true + rm -rf fleet_qa_artifacts + mkdir -p fleet_qa_artifacts/install + source ansible-venv/bin/activate + # install fleet_tests artifacts + for fleet_host in "manager" "member0" "member1"; do + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" + echo "From $fleet_host:" + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true + echo "$(tail -n +2 fleet_qa_artifacts/install/$fleet_host-docker_ps.log )" - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true + done - - name: Stash QA artifacts - if: always() - shell: bash - env: - UI_DIR: ${{ steps.ui_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts + deactivate - # tests artifacts + # qa api tests artifacts # *.log and *.zip might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${UI_DIR}"/*.log ./qa_artifacts || true - cp -r "${UI_DIR}"/*.tar ./qa_artifacts || true - cp -r "${UI_DIR}"/*.json ./qa_artifacts || true - cp -r "${UI_DIR}"/*.html ./qa_artifacts || true + mkdir -p fleet_qa_artifacts/api + cp -r "${API_DIR}"/*.log fleet_qa_artifacts/api || 
true + cp -r "${API_DIR}"/*.tar fleet_qa_artifacts/api || true + cp -r "${API_DIR}"/results/*.zip fleet_qa_artifacts/api || true - - name: Stash QA artifacts + - name: Stash Fleet QA artifacts if: always() uses: actions/upload-artifact@v3 with: - name: qa_artifacts_ui_tests - path: qa_artifacts/* + name: fleet_qa_artifacts + path: ${{ steps.ansible_install_setup.outputs.target_dir }}/fleet_qa_artifacts/* retention-days: 5 - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() + - name: Teardown remote vms (Proxmox) + working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} + if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash run: | - docker system prune -f - docker image prune --all -f + + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + terraform destroy -auto-approve $var_file_arg -var-file=input.tfvars - Validation-Install-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: 
robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . -type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA install tests - id: install_tests_setup - shell: bash - run: | - qa_key=install_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/jira_report.txt /tmp/test_set.txt - - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.jira_report --output_file /tmp/jira_report.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - jira_report=$(cat /tmp/jira_report.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "jira_report=${jira_report}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Feature File Validation - id: feature_file_install - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - . venv/bin/activate - - python3 testcasemanagement/testcase_importer.py --target "${{ steps.install_tests_setup.outputs.test_set }}" - python3 testcasemanagement/feature_file_processor.py --validate - - deactivate - - - name: Prepare QA Feature File Validation slack message - if: always() - id: pre_slack - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is unstable :rain_cloud: \ - ${{ github.job }} feature file validation: ${{ steps.feature_file_install.outcome }} \ - Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message failure - if: failure() - uses: slackapi/slack-github-action@v1.23.0 - with: - channel-id: "C02PB9A9F45" - slack-message: ${{ steps.pre_slack.outputs.msg_error }} - env: - SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - - name: Install tests - timeout-minutes: 45 - id: install - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - export PATH="$HOME/.local/bin:$PATH" - - . venv/bin/activate - rm -rf results/* - - if [ "${{ steps.install_tests_setup.outputs.jira_report }}" == "True" ] ; then - pytest tests/ \ - -ra \ - -k '${{ steps.install_tests_setup.outputs.test_set }}' \ - --installPath="." --jsonConfigFilePath="../basic-standalone-noetic.json.ci" \ - --jira_report - else - pytest tests/ \ - -ra \ - -k '${{ steps.install_tests_setup.outputs.test_set }}' \ - --installPath="." 
--jsonConfigFilePath="../basic-standalone-noetic.json.ci" - fi - - deactivate - - user=$(cat results/credentials.txt | awk -F: '{print $1}') - pwd=$(cat results/credentials.txt | awk -F: '{print $2}') - - echo "movai_user=${user}" >> $GITHUB_OUTPUT - echo "movai_pwd=${pwd}" >> $GITHUB_OUTPUT - - - name: Run mobtest - shell: bash - run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==${{ env.MOBTEST_VERSION }} --ignore-installed - mobtest proj /opt/ros/noetic/share/ - ' - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - 
slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # Spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - INSTALL_DIR: ${{ steps.install_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${INSTALL_DIR}"/*.log ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/*.tar ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/*.log ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/*.zip ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/test_report_*.html ./qa_artifacts || true - 
- - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_install_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Collect Installed components - shell: bash - run: | - mkdir -p artifacts - - used_images=($(docker images --format "{{.Repository}}:{{.Tag}}" | tr ' ' "\n")) - for image in "${used_images[@]}" - do - image_short_name=$(grep -oP "(?<=/$ENV/).*?(?=:)" <<< "$image" || grep -oP "(?<=/devops/).*?(?=:)" <<< "$image" || true) - if [[ "$image_short_name" =~ .*"backend".* || "$image_short_name" =~ .*"spawner".* || "$image_short_name" =~ .*"redis"*.* || "$image_short_name" =~ .*"health-node".* || "" =~ .*"message-server*.*" ]]; - then - echo "scanning $image" - container_ids=($(docker ps -q -f "ancestor=$image" | tr ' ' "\n")) - for container_id in "${container_ids[@]}" - do - container_name=$(docker inspect --format="{{.Name}}" $container_id) - docker exec -t "$container_id" bash -c ' - set -e - - sudo apt update || apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install --upgrade pip || wget https://bootstrap.pypa.io/get-pip.py -O - | python3 - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} - package-deployer scan - ls -la /tmp - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/$container_name-noetic-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy artifacts/$container_name-noetic-3rdParty.dploy - done - else - echo "Skipping scan of $image" - fi - done - export PATH="$HOME/.local/bin:$PATH" - package-deployer scan - cp /tmp/deployable.dploy artifacts/host-noetic-deployable.dploy - cp /tmp/undeployable.dploy artifacts/host-noetic-3rdParty.dploy - - - name: Stash deploy_artifacts_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts/*.dploy - 
retention-days: 5 - - - name: Stash QA artifacts - if: always() - shell: bash - env: - INSTALL_DIR: ${{ steps.install_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validation-API-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . 
-type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA API tests - id: api_tests_setup - shell: bash - run: | - qa_key=api_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Install - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="basic-standalone-noetic.json" - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" - done - - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - - - name: API tests - timeout-minutes: 30 - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - . 
venv/bin/activate - - pytest src \ - --movai-ip ${{ steps.agent_info.outputs.ip }} \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m "not fleet" - - deactivate - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ 
needs.Standalone-Validations.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - - name: Stash QA artifacts - if: always() - shell: bash - env: - API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log and *.zip might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${API_DIR}"/*.log ./qa_artifacts || true - cp -r "${API_DIR}"/*.tar ./qa_artifacts || true - cp -r "${API_DIR}"/results/*.zip ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_api_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validation-Flow-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: 
rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . -type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA Flow tests - id: flow_tests_setup - shell: bash - run: | - qa_key=flow_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/test_set.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ 
secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "version=${tests_version}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Install - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="basic-standalone-noetic.json" - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" - done - - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - execution_status=$? 
- exit $execution_status - rm movai_service_version - - - name: Flow tests - timeout-minutes: 30 - working-directory: ${{ steps.flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - # install test dependencies on spawner - if [ -f apt-requirements.txt ]; then - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" " ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - fi - - # run tests - . venv/bin/activate - - pytest \ - -s \ - -ra \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m '${{ steps.flow_tests_setup.outputs.test_set }}' \ - --tb=short - - deactivate - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: 
${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - FLOW_DIR: ${{ steps.flow_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts, they might not exist - mkdir -p qa_artifacts - cp -r "${FLOW_DIR}"/*.log ./qa_artifacts || true - cp -r 
"${FLOW_DIR}"/*.tar ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_flow_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validations-Finish: - needs: [Validation-UI-Tests, Validation-Install-Tests, Validation-API-Tests, Validation-Flow-Tests] - runs-on: ubuntu-20.04 - outputs: - slack_thread_id: ${{ needs.Validation-UI-Tests.outputs.slack_thread_id }} - steps: - - name: Pass through - run: echo "Pass" - - Fleet-Validations: - needs: [Validate-boostrap-configs] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . 
- - - name: Provision remote vms (AWS) - if: ${{ false }} - shell: bash - run: | - mkdir aws_artifacts - python3 -m pip install awscli - cd staging - export PATH="$HOME/.local/bin:$PATH" - export product="platform" - export version="$PRODUCT_RELEASE_VERSION" - ./ec2_provision.sh - cp -vf infra_ids.txt ../aws_artifacts/ - - - name: Stash ci_infra_artifacts (AWS) - if: ${{ false }} - uses: actions/upload-artifact@v3 - with: - name: ci_infra_artifacts - path: aws_artifacts/* - retention-days: 5 - - - name: Install terraform - shell: bash - run: | - wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg - echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list - sudo apt update && sudo apt install terraform -y - - - name: Setup terraform proxmox provisioner - id: provision_infra_setup - shell: bash - run: | - provision_infra_dir=provision_scripts - provision_infra_version=0.0.1-38 - provision_infra_repo_name=devops-tf-proxmox-fleet - - rm -rf $provision_infra_dir - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir - ls -la $provision_infra_dir - echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT - - - name: Define Instance names - id: infra_names - shell: bash - run: | - branch=$(echo ${GITHUB_REF#refs/heads/} | sed "s;\.;-;g" ) - - local_manager_prefix="ip-$branch-manager" - local_worker_prefix="ip-$branch-worker" - echo "$local_manager_prefix" - echo "$local_worker_prefix" - - echo "manager_prefix=${local_manager_prefix}" >> $GITHUB_OUTPUT - echo "worker_prefix=${local_worker_prefix}" >> $GITHUB_OUTPUT - - - name: Provision remote vms (Proxmox) - 
working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - shell: bash - run: | - terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" - terraform plan - terraform apply -auto-approve - env: - TF_VAR_number_agents: ${{ inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} - TF_VAR_proxmox_host: "hel" - TF_VAR_vm_gateway: "172.22.0.1" - TF_VAR_ip_mask: 24 - TF_VAR_bios: "seabios" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-fleet" - - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.manager_prefix }} - TF_VAR_fleet_manager_memory: 8192 - TF_VAR_template_name_no_gpu: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_worker_name_prefix: ${{ steps.infra_names.outputs.worker_prefix }} - TF_VAR_fleet_worker_memory: 8192 - - - name: Prepare Devops provisioning slack message - if: always() - id: pre_slack_infra - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is being impacted by an infrastructural issue. \ - Provisioning of fleet infrastructure failed. Please take a look! 
\ - Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message failure - if: failure() - uses: slackapi/slack-github-action@v1.23.0 - with: - channel-id: "G0102LEV1CL" - slack-message: ${{ steps.pre_slack_infra.outputs.msg_error }} - env: - SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - - name: Apply ansible inventory - shell: bash - run: | - cp ${{ steps.provision_infra_setup.outputs.target_dir }}/hosts staging/hosts - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file staging/hosts --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt - - - name: Setup ansible installation - id: ansible_install_setup - shell: bash - run: | - install_key=ansible_deploy - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.name --output_file /tmp/repo_name.txt - install_infra_dir=$(cat /tmp/target_dir.txt) - install_infra_version=$(cat /tmp/version.txt) - install_infra_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $install_infra_repo_name - integration-pipeline fetch_by_tag --repo $install_infra_repo_name --version $install_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $install_infra_dir - ls -la $install_infra_dir - echo "target_dir=${install_infra_dir}" >> $GITHUB_OUTPUT - - - name: Ansible install platform - id: ansible_install_platform - working-directory: ${{ 
steps.ansible_install_setup.outputs.target_dir }} - shell: bash - run: | - - function ensure_agent_up(){ - vm_ip=$1 - i="0" - max=15 - success=1 - while [ $success -ne 0 ] - do - echo "Checking if $vm_ip is reachable ($i/$max)" - ping -c1 $vm_ip &>/dev/null - success=$? - - if [ $i -lt $max ] - then - i=$[$i+1] - else - echo "Timeout waiting for $vm_ip" - exit 2 - fi - - sleep 2 - done - - } - echo "${{ secrets.ssh_pem_fleet_aws_vm }}" > ~/.ssh/aws_slave.pem - sudo chmod 600 ~/.ssh/aws_slave.pem - while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ; do echo Waiting for other software managers to finish... ; sleep 5;done - sudo apt install -y python3.9 python3.9-venv - python3.9 -m venv ansible-venv - source ansible-venv/bin/activate - python3 -m pip install -r requirements.txt - ansible-galaxy install -r requirements.yml --timeout 120 - - stripped_ips=$(echo ${{ inputs.fleet_ips }} | sed "s;\[;;g" | sed "s;];;g" | sed "s; ;;g") - touch ~/.ssh/known_hosts - sudo chmod 600 ~/.ssh/known_hosts - IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips - manager_ip=${stripped_ips_arr[0]} - echo $manager_ip - echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT - for ip in "${stripped_ips_arr[@]}" - do - ensure_agent_up $ip - ssh-keygen -f ~/.ssh/known_hosts -R $ip - ssh-keyscan -H $ip >> ~/.ssh/known_hosts - done - - ansible-playbook install.yml \ - -i ../staging/hosts \ - --key-file ~/.ssh/aws_slave.pem \ - --extra-vars=@"$(pwd)/.."/product-manifest.yaml \ - -e fleet_domain_dns="" \ - -e "{\"proxycerts__remote_redis_servers_fqn\": [$(cat ../staging/manager_private_ip.txt)]}" \ - -e '{"fleet_extra_hosts": ["172.22.0.106 registry.hel.mov.ai traefik"]}' \ - --skip-tags "validate,ufw,hardening" - execution_status=$? 
- deactivate - exit $execution_status - - - name: Setup QA API tests - id: api_tests_setup - shell: bash - run: | - qa_key=api_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: API tests - timeout-minutes: 30 - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - . 
venv/bin/activate - - pytest src \ - --movai-ip ${{ steps.ansible_install_platform.outputs.manager_ip }} \ - --movai-user admin \ - --movai-pw admin@123 \ - -m fleet - - deactivate - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - for container in backend spawner messager-server; do - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^${container}-.*") - docker logs "${CONTAINER_ID}" &> "${container}.log" || true - done || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - 
slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Collect Fleet QA artifacts - working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} - if: always() - shell: bash - env: - API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} - run: | - rm -rf fleet_qa_artifacts - mkdir -p fleet_qa_artifacts/install - source ansible-venv/bin/activate - # install fleet_tests artifacts - for fleet_host in "manager" "member0" "member1"; do - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true - - echo "From $fleet_host:" - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true - echo "$(tail -n +2 fleet_qa_artifacts/install/$fleet_host-docker_ps.log )" - - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true - done - - deactivate - - # qa api tests artifacts - # *.log and *.zip might not exist if the test fails early - mkdir -p fleet_qa_artifacts/api - cp -r "${API_DIR}"/*.log fleet_qa_artifacts/api || true - cp -r "${API_DIR}"/*.tar fleet_qa_artifacts/api || true - cp -r "${API_DIR}"/results/*.zip fleet_qa_artifacts/api || true - - - name: Stash Fleet QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: fleet_qa_artifacts - 
path: ${{ steps.ansible_install_setup.outputs.target_dir }}/fleet_qa_artifacts/* - retention-days: 5 - - - name: Teardown remote vms (Proxmox) - working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} - shell: bash - run: terraform destroy -auto-approve - env: - TF_VAR_number_agents: ${{ inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} - - - Build-Simulator: - needs: [Validate-boostrap-configs] - runs-on: integration-pipeline - env: - DISTRO: noetic - outputs: - slack_thread_id: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: unstash sim_configs - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - - name: Prepare Skip variables - id: pre_simulator_build - run: | - if [ ! 
-f "simulator_artifacts/version" ]; then - echo "skip_simulator_build=true" >> $GITHUB_OUTPUT - else - echo "skip_simulator_build=false" >> $GITHUB_OUTPUT - fi - - - name: Lint docker image - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - wget https://github.com/hadolint/hadolint/releases/download/v2.9.3/hadolint-Linux-x86_64 - chmod +x hadolint-Linux-x86_64 - ./hadolint-Linux-x86_64 docker/$DISTRO/Dockerfile-simulator -t error - - - name: Download models - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline fetch_simulator_models \ - --manifest_platform_base_key product_components \ - --gh_api_user $GITHUB_API_USR \ - --gh_api_pwd ${{ secrets.auto_commit_pwd }} \ - --target_dir "./models" - if [ ! -d ./models ]; then mkdir -p ./models; fi - - - name: Login to Private Registry - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Prepare docker build variables - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - id: pre_build - run: | - echo "image_name=$(cat simulator_artifacts/simulator_name.ci)" >> $GITHUB_OUTPUT - echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT - - - name: Build with args and push:${{ inputs.deploy }} - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - uses: docker/build-push-action@v3 - with: - context: . 
- platforms: linux/amd64 - file: docker/${{ env.DISTRO }}/Dockerfile-simulator - push: true - tags: "${{ env.REGISTRY }}/qa/${{ steps.pre_build.outputs.image_name }}" - pull: true - build-args: | - BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} - CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} - - - name: Collect Installed components - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - cd simulator_artifacts - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline publish_simulator_state_artifacts \ - --product_name ${{ inputs.product_name }} \ - --branch ${GITHUB_REF#refs/heads/} - - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ 
needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - - name: pre-stash - shell: bash - run: | - echo "$REGISTRY/qa/$(cat simulator_artifacts/simulator_name.ci)" > simulator.image.artifact - - - name: Stash deploy_simulator_artifacts - uses: actions/upload-artifact@v3 - with: - name: deploy_simulator_artifacts - path: simulator.image.artifact - retention-days: 5 - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Simulator-Validations: - needs: [Build-Simulator] - runs-on: integration-pipeline - steps: - - uses: rtCamp/action-cleanup@master - - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash sim_configs - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - - name: unstash robot_jsons_noetic - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . 
- - - name: Login to Private Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Setup QA Flow tests - id: sim_flow_tests_setup - shell: bash - run: | - qa_key=flow_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/test_set.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "version=${tests_version}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Installation - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - - CONFIG_FILE_NAME="basic-standalone-ignition-noetic.json" - mkdir -p userspace/ - - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - export PUBLIC_IP=$(hostname -I | awk '{print $1}') - - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" - done - - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - env: - DISPLAY: ":0" - SIMULATION_ID: "CI" - - - name: Simulator tests - timeout-minutes: 30 - working-directory: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - # install test dependencies on spawner - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" 
" ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - - # run tests - . venv/bin/activate - - pytest \ - -s \ - -ra \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m 'simulator' \ - --tb=short - - deactivate - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ 
steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - SIM_DIR: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${SIM_DIR}"/*.log ./qa_artifacts || true - cp -r "${SIM_DIR}"/*.tar ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_simulator_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Collect Installed components - shell: bash - run: | - mkdir -p artifacts - - used_images=($(docker images --format "{{.Repository}}:{{.Tag}}" | tr ' ' "\n")) - for image in "${used_images[@]}" - do - image_short_name=$(grep -oP "(?<=/$ENV/).*?(?=:)" <<< "$image" || grep -oP "(?<=/devops/).*?(?=:)" <<< "$image" || true) - if [[ "$image_short_name" =~ .*"spawner".* ]]; - then - echo "scanning $image" - container_ids=($(docker ps -q -f 
"ancestor=$image" | tr ' ' "\n")) - for container_id in "${container_ids[@]}" - do - container_name=$(docker inspect --format="{{.Name}}" $container_id) - docker exec -t "$container_id" bash -c ' - set -e - - sudo apt update || apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install --upgrade pip || wget https://bootstrap.pypa.io/get-pip.py -O - | python3 - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} - package-deployer scan - ls -la /tmp - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/$container_name-noetic-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy artifacts/$container_name-noetic-3rdParty.dploy - done - else - echo "Skipping scan of $image" - fi - done - export PATH="$HOME/.local/bin:$PATH" - package-deployer scan - cp /tmp/deployable.dploy artifacts/simulator-noetic-deployable.dploy - cp /tmp/undeployable.dploy artifacts/simulator-noetic-3rdParty.dploy - - - name: Stash deploy_artifacts_simulator_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_simulator_noetic - path: artifacts/*.dploy - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - publish: - needs: [Validations-Finish, Fleet-Validations, Simulator-Validations] - runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Validations-Finish.outputs.slack_thread_id }} - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - 
echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: unstash deploy_artifacts_noetic - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts - - - name: unstash deploy_artifacts_simulator_noetic - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_simulator_noetic - path: artifacts - - - name: unstash deploy_simulator_artifacts - uses: actions/download-artifact@v3 - with: - name: deploy_simulator_artifacts - path: . - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: Publish and create release - shell: bash - run: | - set -m - set -e - - export PATH="$HOME/.local/bin:$PATH" - git config --global --add safe.directory $(pwd) - git config --global user.name '${{ secrets.auto_commit_user }}' - git config --global user.email '${{ secrets.auto_commit_mail }}' - git config --global user.password ${{ secrets.auto_commit_pwd }} - - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - - mkdir -p deployment_artifacts - package-deployer join --dploy_workspace "$(pwd)/artifacts" - integration-pipeline get_image_list_from_manifest --manifest_platform_base_key product_components --docker_registry $REGISTRY - cp *.json deployment_artifacts - cp artifacts/merged.dploy deployment_artifacts/deployable.dploy - echo -e "$(cat ./artifacts/product.image.artifact)\n$(cat 
./simulator.image.artifact)" > deployment_artifacts/product.image.artifact - - cp product.version deployment_artifacts - cp product-manifest.yaml deployment_artifacts - product_version=$(cat product.version) - - # danger zone. Everything will be deleted. - mv product-manifest.yaml product-manifest.yaml.bck - - git restore product.version - git restore product-manifest.yaml - git pull - echo "$product_version" > product.version - - git add product.version - git commit -m "[skip actions] Automatic Raise" - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare raise variables - id: pre_raise - run: | - echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT - - - name: Raise App version - uses: CasperWA/push-protected@v2.14.0 - with: - token: ${{ secrets.auto_commit_pwd }} - branch: ${{ steps.pre_raise.outputs.branch }} - unprotect_reviews: true - - - name: Github Publish - shell: bash - run: | - commit_hash=$(git log --format="%H" -n 1) - product_version=$(cat product.version) - gh release create -p --generate-notes --target "$commit_hash" -t "${{ inputs.product_name }} $product_version" $product_version - # add all files in the deployment_artifacts folder - find deployment_artifacts -type f -exec gh release upload $product_version {} \; - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Update release notes - shell: bash - run: | - # release version - product_version=$(cat product.version) - - # get existent release body - ORIGINAL_RN=$(gh release view "${product_version}" --json body | jq -r .body) - echo -e "ORIGINAL_RN:\n ${ORIGINAL_RN}" - - # get release PRs - PRS=$(echo "${ORIGINAL_RN}" | sed -rn "s/.* by @.* in https:\/\/github\.com\/${{ github.repository_owner }}\/${{ github.event.repository.name }}\/pull\/([0-9]+).*/\1/p" | tr '\n' ' ') - # change to array - PRS=($PRS) - echo "Found the following PRs: ${PRS[@]}" - - # new release notes file - rm -rf notes.txt - - # What's Changed - with info from PRs - echo "## What's Changed" >> 
notes.txt - - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No relevant changes." >> notes.txt - else - # PRs exist - for pr in "${PRS[@]}"; do - gh pr view "${pr}" --json body | jq -r .body >> notes.txt - done - fi - echo "" >> notes.txt - - # PRs - echo "## PRs" >> notes.txt - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No PRs." >> notes.txt - else - # PRs exist - echo "${ORIGINAL_RN}" | grep "\* .* by @.* in https://github.com/${{ github.repository_owner }}/" >> notes.txt - fi - echo "" >> notes.txt - - ## Diff - echo "## Diff" >> notes.txt - echo "${ORIGINAL_RN}" | grep "\*\*Full Changelog\*\*" >> notes.txt - - # set new release notes - gh release edit "${product_version}" --notes-file notes.txt - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack - run: | - MESSAGE=":white_check_mark: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg }} - slack-update-message-ts: ${{ needs.Validations-Finish.outputs.slack_thread_id }} - - - - Run-Status: - runs-on: ubuntu-20.04 - needs: [publish, Validate-boostrap-configs] - if: ${{ always() && ( needs.publish.result == 'failure' || needs.publish.result == 'cancelled' || needs.publish.result == 'skipped') }} - steps: - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: Copy product configs - shell: bash - run: | - cp ./platform_configs/product.version product.version - cp 
./platform_configs/product-manifest.yaml product-manifest.yaml - - - name: Prepare slack variables - id: pre_slack - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is unstable (or cancelled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg_error }} - slack-update-message-ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 01706faf..da98f3b8 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -75,9 +75,9 @@ on: required: true proxmox_api_url: required: true - proxmox_api_token_id: + proxmox_ve_username: required: true - proxmox_api_token_secret: + proxmox_ve_password: required: true ssh_priv_key: required: true @@ -100,12 +100,12 @@ env: MID_REGISTRY: registry.cloud.mov.ai USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace - PROVISION_INFRA_REPO: "devops-tf-proxmox-fleet" - PROVISION_INFRA_VERSION: "0.1.0-7" + PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" + PROVISION_INFRA_VERSION: "0.0.0-10" # slack channel movai-projects - SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} + #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel - #SLACK_CHANNEL: "C05K2KF1UP8" + SLACK_CHANNEL: "C05K2KF1UP8" PROJECT_DATA_VIEWER_API: "https://personal-7vf0v2cu.outsystemscloud.com/ProjectDataViewer5/rest/V1//CreateProject" MINIO_S3_URL: "https://s3.mov.ai" @@ -405,268 +405,6 @@ 
jobs: docker system prune -f docker image prune --all -f - Install-Robot: - needs: [Build-Spawner] - strategy: - matrix: - distro: ${{ fromJSON(inputs.ros_distro) }} - runs-on: integration-pipeline - - steps: - - uses: rtCamp/action-cleanup@master - - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash manifest - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - - name: unstash robot_jsons_${{ matrix.distro }} - uses: actions/download-artifact@v3 - with: - name: robot_jsons_${{ matrix.distro }} - path: . - - - name: Login to ${{ env.REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Login to ${{ env.MID_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Docker load spawner image - shell: bash - run: | - docker pull "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ 
needs.Build-Spawner.outputs.raised_version }}" "${{ env.PUSH_REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - - - name: Installation - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME=${{ inputs.product_name }}-${{ matrix.distro }}.json - export PATH="$HOME/.local/bin:$PATH" - mkdir -p userspace/ - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - - execution_status=$? 
- exit $execution_status - rm movai_service_version - - - name: Collect Install logs - continue-on-error: true - run: | - # cleanup - rm -rf install_logs - - mkdir -p install_logs - journalctl -u movai-service -t mobros --since "1hour ago" > install_logs/spawner-firmware.log - journalctl -u movai-service --since "1hour ago" > install_logs/movai-service.log - - - name: Stash Install logs artifacts - continue-on-error: true - if: always() - uses: actions/upload-artifact@v3 - with: - name: install_logs - path: install_logs/* - retention-days: 5 - - - name: Run mobtest - shell: bash - run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==0.0.4.3 --ignore-installed - mobtest proj /opt/ros/noetic/share/ - ' - - - name: Collect Installed components - if: always() - shell: bash - run: | - container_id=$(docker ps -q -f "ancestor=$REGISTRY/qa/${{ inputs.product_name }}-${{ matrix.distro }}:$(cat product.version)") - docker exec -t "$container_id" bash -c ' - set -e - sudo apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} --ignore-installed - package-deployer scan - package-deployer scanAll - ls -la /tmp - - { - echo 'Annotation:*' - echo 'Callback:*' - echo 'Configuration:*' - echo 'Flow:*' - echo 'GraphicScene:*' - echo 'Layout:*' - echo 'Node:*' - echo 'Package:*' - } >> /tmp/manifest.txt - mkdir /tmp/proj_metadata - python3 -m tools.backup -p /tmp/proj_metadata/ -m /tmp/manifest.txt -a export -i - - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/${{ inputs.product_name }}-${{ matrix.distro 
}}-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-3rdParty.dploy - docker cp $container_id:/tmp/apt_packages.json artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-apt_packages.json - - mkdir -p metadata_artifact tmp_meta - docker cp $container_id:/tmp/proj_metadata/ ./ - tar cvzf ./metadata_artifact/metadata.tar.gz ./proj_metadata - CONFIG_FILE_NAME=${{ inputs.product_name }}-${{ matrix.distro }}.json - - echo "$PUSH_REGISTRY/qa/${{ inputs.product_name }}-${{ matrix.distro }}:$(cat product.version)">artifacts/product-${{ matrix.distro }}.image.artifact - - - name: Un stash dependency_version - if: ${{ inputs.propagate_project == false }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: dependency_version - - - name: Get project and solution version - id: project_and_solution_version - if: ${{ inputs.use_project_data_viewer == true && inputs.propagate_project == false }} - continue-on-error: true - shell: bash - run: | - PROJECT_VERSION=$(cat product.version) - echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_OUTPUT - MOVAI_SOLUTION_VERSION=$(cat dependency_version/base_version) - echo "MOVAI_SOLUTION_VERSION=$MOVAI_SOLUTION_VERSION" >> $GITHUB_OUTPUT - - - name: Publish to project data viewer - id: publish_to_pdv - if: ${{ inputs.use_project_data_viewer == true && inputs.propagate_project == false }} - continue-on-error: true - shell: bash - run: | - curl --location '${{ env.PROJECT_DATA_VIEWER_API}}?Name=${{ inputs.product_name }}-${{ matrix.distro }}&Version=${{ steps.project_and_solution_version.outputs.PROJECT_VERSION }}&SolutionVersion=${{ steps.project_and_solution_version.outputs.MOVAI_SOLUTION_VERSION }}' \ - --header 'Content-Type: application/json' \ - --header 'Authorization: Basic ${{ secrets.pdv_auth_token }}' \ - --data @artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-apt_packages.json - - - name: Get current job id - if: always() - 
shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - - - name: Stash project metadata - - if: always() - uses: actions/upload-artifact@v3 - with: - name: project_metadata - path: metadata_artifact/* - retention-days: 3 - - - name: Stash deploy_artifacts_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_noetic 
- path: artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - Build-Simulator: needs: [Validate-boostrap-configs, Build-Spawner] runs-on: integration-pipeline @@ -774,246 +512,28 @@ jobs: - name: Prepare docker build variables if: ${{ inputs.with_simulation == 'true' }} - id: pre_build - run: | - push_name_tmp=$(echo "${{ steps.pre_simulator_build.outputs.image_name }}" | sed "s-${{ env.REGISTRY }}-${{ env.MID_REGISTRY }}-g") - echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT - echo "push_name=$push_name_tmp" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Docker build - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' && inputs.with_simulation == 'true'}} - shell: bash - run: | - docker build --add-host ${{ env.REGISTRY }}:172.22.0.106 \ - --build-arg BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} \ - --build-arg CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} \ - --file docker/${{ env.DISTRO }}/Dockerfile-simulator \ - --platform linux/amd64 \ - --tag ${{ steps.pre_build.outputs.push_name }} \ - --pull \ - --push . 
- - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - - name: Setup simulation Tests - id: post_simulator_build - shell: bash - run: | - if 
[ "${{ inputs.with_simulation_tests }}" = "true" ]; - then - echo "simul_tests_infra=simul-mary-queuer" >> $GITHUB_OUTPUT - else - echo "simul_tests_infra=integration-pipeline" >> $GITHUB_OUTPUT - fi - - Install-Simulator-Robot: - needs: [Build-Spawner, Build-Simulator] - strategy: - matrix: - distro: ${{ fromJSON(inputs.ros_distro) }} - runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - skip_simulator: ${{ needs.Build-Simulator.outputs.skip_simulator }} - steps: - - uses: rtCamp/action-cleanup@master - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - - - name: Checkout - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/checkout@v3 - - - name: Agent info - if: ${{ inputs.with_simulation == 'true' }} - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash manifest - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - - name: unstash robot_jsons_${{ matrix.distro }} - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: robot_jsons_${{ matrix.distro }} - path: . 
- - - name: Login to ${{ env.REGISTRY }} Registry - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Login to ${{ env.MID_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Docker load spawner image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - docker pull "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.PUSH_REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - - - name: Docker load simulator image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - promoted_name=$(echo "${{ needs.Build-Simulator.outputs.image_name }}" | sed "s-/ci/-/qa/-g" | sed "s-${{ env.MID_REGISTRY }}-${{ env.REGISTRY 
}}-g") - - docker pull "${{ needs.Build-Simulator.outputs.image_name }}" - docker tag "${{ needs.Build-Simulator.outputs.image_name }}" $promoted_name - - - - name: Installation - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="standalone-${{ inputs.product_name }}-simulator-${{ matrix.distro }}.json" - mkdir -p userspace/models_database/ userspace/tugbot_ignition/ - - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - export PUBLIC_IP=$(hostname -I | awk '{print $1}') - - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - - execution_status=$? 
- exit $execution_status - rm movai_service_version - env: - SIMULATION_ID: "CI" - - - name: Collect Install logs - continue-on-error: true - run: | - # cleanup - rm -rf install_logs - - mkdir -p install_logs - journalctl -u movai-service -t mobros --since "1hour ago" > install_logs/spawner-firmware.log - journalctl -u movai-service --since "1hour ago" > install_logs/movai-service.log - - - name: Stash Install simulator logs artifacts - continue-on-error: true - if: always() - uses: actions/upload-artifact@v3 - with: - name: install_simulator_logs - path: install_logs/* - retention-days: 5 - - - name: Run mobtest - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash + id: pre_build run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==${{ env.MOBTEST_VERSION }} --ignore-installed - mobtest proj /opt/ros/${{ matrix.distro }}/share/ - ' + push_name_tmp=$(echo "${{ steps.pre_simulator_build.outputs.image_name }}" | sed "s-${{ env.REGISTRY }}-${{ env.MID_REGISTRY }}-g") + echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT + echo "push_name=$push_name_tmp" >> $GITHUB_OUTPUT + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 - - name: Output simulator image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - id: promote + - name: Docker build + if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' && inputs.with_simulation == 'true'}} shell: bash run: | - sim_img_name="$(echo "${{ needs.Build-Simulator.outputs.image_name }}" | sed "s-/ci/-/qa/-g")" - push_name=$(echo "$sim_img_name" | sed "s-${{ env.REGISTRY }}-${{ 
env.PUSH_REGISTRY }}-g") + docker build --add-host ${{ env.REGISTRY }}:172.22.0.106 \ + --build-arg BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} \ + --build-arg CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} \ + --file docker/${{ env.DISTRO }}/Dockerfile-simulator \ + --platform linux/amd64 \ + --tag ${{ steps.pre_build.outputs.push_name }} \ + --pull \ + --push . - echo "image_name=$push_name" >> $GITHUB_OUTPUT - name: Get current job id if: always() @@ -1045,7 +565,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Slack message failure uses: archive/github-actions-slack@master @@ -1055,31 +575,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - name: pre-stash - if: ${{ inputs.with_simulation == 'true' }} - shell: bash - run: | - echo "${{ steps.promote.outputs.image_name }}" > simulator.image.artifact - - - name: Stash deploy_simulator_artifacts - if: ${{ inputs.with_simulation == 'true' }} - uses: actions/upload-artifact@v3 - with: - name: deploy_simulator_artifacts - path: simulator.image.artifact - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Docker cleanups if: always() @@ -1088,6 +584,17 @@ jobs: docker system prune -f docker image prune --all -f + - name: Setup simulation 
Tests + id: post_simulator_build + shell: bash + run: | + if [ "${{ inputs.with_simulation_tests }}" = "true" ]; + then + echo "simul_tests_infra=simul-mary-queuer" >> $GITHUB_OUTPUT + else + echo "simul_tests_infra=integration-pipeline" >> $GITHUB_OUTPUT + fi + Validation-Simulator-Tests: needs: [Build-Spawner, Build-Simulator] runs-on: ${{ needs.Build-Simulator.outputs.simulator_tests_agent_name }} @@ -1160,8 +667,9 @@ jobs: rm -rf $provision_infra_dir export PATH="$HOME/.local/bin:$PATH" integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir + ls -la $provision_infra_dir - echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT + echo "target_dir=${provision_infra_dir}/" >> $GITHUB_OUTPUT - name: Define Instance names if: ${{ inputs.with_simulation_tests }} @@ -1184,31 +692,48 @@ jobs: terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve + terraform refresh echo "${{ secrets.ssh_priv_key }}" > ~/.ssh/ci_priv_key.pem sudo chmod 600 ~/.ssh/ci_priv_key.pem env: TF_VAR_number_agents: 0 - TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' + TF_VAR_ip_main: "dhcp" TF_VAR_storage: "local-lvm" TF_VAR_proxmox_host_list: '["mary"]' - TF_VAR_vm_gateway: "10.10.1.254" - TF_VAR_ip_mask: 23 + #TF_VAR_vm_gateway: "10.10.1.254" + #TF_VAR_ip_mask: 23 TF_VAR_bios: "ovmf" TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" + TF_VAR_tags: '["ip-simul-ci"]' TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "u22dci-gpu" TF_VAR_fleet_manager_name: ${{ 
steps.infra_names.outputs.simul_prefix }} TF_VAR_fleet_manager_memory: 51200 TF_VAR_fleet_manager_cores: 14 - TF_VAR_fleet_manager_disk_size: "110G" + TF_VAR_fleet_manager_disk_size: "110" TF_VAR_fleet_manager_balloon: 0 + TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} + TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} + TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + TF_VAR_vm_core_type: "host" + TF_VAR_vm_disk_size: 110 + TF_VAR_vm_disk_interface: "scsi0" + TF_VAR_vm_type: "q35" + TF_VAR_vm_os_type: "l26" + TF_VAR_vm_network_bridge: "vmbr0" + TF_VAR_vm_network_ip_address: "dhcp" + TF_VAR_vm_disk_storage: "nas-mary" + TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" + TF_VAR_vm_bios: "ovmf" + TF_VAR_cloud_init_storage: "local" + TF_VAR_hostpci_device: '["hostpci0"]' + TF_VAR_hostpci_device_id: '["0000:01:00.0"]' + TF_VAR_hostpci_device_pcie: "true" + TF_VAR_hostpci_device_xvga: "true" + TF_VAR_sleep: "40" + # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs if: ${{ inputs.with_simulation_tests }} @@ -1397,7 +922,8 @@ jobs: run: | ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' set -e - + wget https://download.nomachine.com/download/8.11/Linux/nomachine_8.11.3_4_amd64.deb + sudo dpkg -i nomachine_8.11.3_4_amd64.deb export DISPLAY="$(w -oush | grep -Eo " :[0-9]+" | uniq | cut -d \ -f 2)" echo "Display detected (dinamic) is $DISPLAY" export DISPLAY=":0" @@ -1418,10 +944,12 @@ jobs: integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" + export USERSPACE_FOLDER_PATH="/opt/movai/robots/userspace" export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" - rm -rf userspace + 
sudo rm -rf $USERSPACE_FOLDER_PATH + sudo mkdir -p $USERSPACE_FOLDER_PATH/.git/ + sudo chmod -R 777 /opt/movai wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash chmod +x ./QuickStart_$(cat quickstart_version).bash @@ -1446,10 +974,10 @@ jobs: run: | ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' set -e - + sleep 40 export DISPLAY="$(w -oush | grep -Eo " :[0-9]+" | uniq | cut -d \ -f 2)" echo "Display detected (dinamic) is $DISPLAY" - export DISPLAY=":0" + export DISPLAY=":1" echo "Display detected is $DISPLAY" xhost +local:docker @@ -1596,356 +1124,42 @@ jobs: AWS_DEFAULT_OUTPUT: "none" - - name: Teardown remote vms (Proxmox) - working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - if: ${{ inputs.with_simulation_tests && inputs.debug_simulation_tests_keep_alive == false && always() }} - shell: bash - run: terraform destroy -auto-approve - env: - TF_VAR_number_agents: 0 - TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' - TF_VAR_proxmox_host_list: '["mary"]' - TF_VAR_vm_gateway: "10.10.1.254" - TF_VAR_ip_mask: 23 - TF_VAR_bios: "ovmf" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "u22dci-gpu" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - TF_VAR_fleet_manager_memory: 30000 - TF_VAR_fleet_manager_cores: 10 - TF_VAR_fleet_manager_disk_size: "110G" - - publish: - needs: [Install-Robot, Install-Simulator-Robot, Validation-Simulator-Tests] - runs-on: integration-pipeline - container: - image: registry.aws.cloud.mov.ai/qa/py-buildserver:v2.0.1 - credentials: - username: ${{secrets.registry_user}} - password: 
${{secrets.registry_password}} - steps: - - uses: rtCamp/action-cleanup@master - if: ${{ inputs.is_nightly_run == false }} - - - name: Checkout - uses: actions/checkout@v3 - if: ${{ inputs.is_nightly_run == false }} - - - name: Agent info - if: ${{ inputs.is_nightly_run == false }} - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: unstash deploy_artifacts_noetic - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts - - - name: unstash manifest - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - name: unstash sim_configs - if: ${{ inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - name: unstash project metadata - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: project_metadata - path: . - - - name: unstash deploy_simulator_artifacts - if: ${{ inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: deploy_simulator_artifacts - path: . 
- - - name: Install CI Scripts - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: | - python3 -m venv ci_scripts - source ci_scripts/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - deactivate - - - name: Install Package Deployer - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: Login to ${{ env.MID_REGISTRY }} Registry - if: ${{ inputs.is_nightly_run == false }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - if: ${{ inputs.is_nightly_run == false }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Publish simulator state - if: ${{ needs.Install-Simulator-Robot.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - shell: bash - run: | - source ci_scripts/bin/activate - cd simulator_artifacts - integration-pipeline publish_simulator_state_artifacts \ - --product_name ${{ inputs.product_name }} \ - --branch ${GITHUB_REF#refs/heads/} - deactivate - - name: Publish and create release - if: ${{ inputs.is_nightly_run == false }} - id: bump - shell: bash - run: | - git config --global --add safe.directory $(pwd) - git config --global user.name '${{ secrets.auto_commit_user }}' - git config --global user.email '${{ secrets.auto_commit_mail }}' - git config --global user.password ${{ secrets.auto_commit_pwd }} - - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - mkdir -p 
deployment_artifacts - - source ci_scripts/bin/activate - ls -la - mkdir -p pkgs_deployable - cp artifacts/*deployable.dploy pkgs_deployable - package-deployer join --dploy_workspace "$(pwd)/pkgs_deployable" - mv "$(pwd)/pkgs_deployable/merged.dploy" deployment_artifacts/deployable.dploy - - mkdir -p pkgs_undeployable - cp artifacts/*3rdParty.dploy pkgs_undeployable - package-deployer join --dploy_workspace "$(pwd)/pkgs_undeployable" - mv "$(pwd)/pkgs_undeployable/merged.dploy" deployment_artifacts/3rdParty.dploy - - rm -rf pkgs_deployable pkgs_undeployable - - cp artifacts/*.json deployment_artifacts - if [ "${{ inputs.with_simulation }}" = "true" ]; - then - echo -e "$(cat ./artifacts/product-noetic.image.artifact)\n$(cat ./simulator.image.artifact)" > deployment_artifacts/product.image.artifact - else - cp ./artifacts/product-noetic.image.artifact deployment_artifacts/product.image.artifact - fi - #cp ./artifacts/product-noetic.image.artifact deployment_artifacts/product.image.artifact - cp deployment_artifacts/product.image.artifact ./ - - SAVEIFS=$IFS - IFS=$'\n' - images=($(cat product.image.artifact)) - IFS=$SAVEIFS # Restore original IFS - - for image in "${images[@]}" - do - source=$(echo $image | sed "s-/qa/-/ci/-g" | sed "s-${{ env.PUSH_REGISTRY }}-${{ env.MID_REGISTRY }}-g" ) - target=$(echo $image | sed "s-${{ env.MID_REGISTRY }}-${{ env.PUSH_REGISTRY }}-g" ) - docker pull $source - echo "tagging $source as $target" - docker tag $source $target - docker push $target - done - - integration-pipeline patch_manifest_with_spawner - cat product-manifest.yaml - cp product-manifest.yaml deployment_artifacts - - cp metadata.tar.gz deployment_artifacts - - product_version=$(cat product.version) - # danger zone. Everything will be deleted. 
- git restore product.version - git restore product-manifest.yaml - git pull - echo "$product_version" > product.version - - git add product.version - git commit -m "[skip actions] Automatic Raise" - - echo "version=${product_version}" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare raise variables - if: ${{ inputs.is_nightly_run == false }} - id: pre_raise - run: | - #echo ::set-output name=branch::${GITHUB_REF#refs/heads/} - echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT - - - name: Raise App version - if: ${{ inputs.is_nightly_run == false }} - uses: CasperWA/push-protected@v2.14.0 - with: - token: ${{ secrets.auto_commit_pwd }} - branch: ${{ steps.pre_raise.outputs.branch }} - unprotect_reviews: true - - - name: Github Publish - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: | - commit_hash=$(git log --format="%H" -n 1) - product_version=$(cat product.version) - gh release create -p --generate-notes --target "$commit_hash" -t "${{ inputs.product_name }} $product_version" $product_version - # add all files in the deployment_artifacts folder - find deployment_artifacts -type f -exec gh release upload $product_version {} \; - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Update release notes - shell: bash - if: ${{ inputs.is_nightly_run == false }} - run: | - # release version - product_version=$(cat product.version) - - # get existent release body - ORIGINAL_RN=$(gh release view "${product_version}" --json body | jq -r .body) - echo -e "ORIGINAL_RN:\n ${ORIGINAL_RN}" - - # get release PRs - PRS=$(echo "${ORIGINAL_RN}" | sed -rn "s/.* by @.* in https:\/\/github\.com\/${{ github.repository_owner }}\/${{ github.event.repository.name }}\/pull\/([0-9]+).*/\1/p" | tr '\n' ' ') - # change to array - PRS=($PRS) - echo "Found the following PRs: ${PRS[@]}" - - # new release notes file - rm -rf notes.txt - - # What's Changed - with info from PRs - echo "## What's Changed" >> notes.txt - - if [ 
${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No relevant changes." >> notes.txt - else - # PRs exist - for pr in "${PRS[@]}"; do - gh pr view "${pr}" --json body | jq -r .body >> notes.txt - done - fi - echo "" >> notes.txt - - # PRs - echo "## PRs" >> notes.txt - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No PRs." >> notes.txt - else - # PRs exist - echo "${ORIGINAL_RN}" | grep "\* .* by @.* in https://github.com/${{ github.repository_owner }}/" >> notes.txt - fi - echo "" >> notes.txt - - ## Diff - echo "## Diff" >> notes.txt - echo "${ORIGINAL_RN}" | grep "\*\*Full Changelog\*\*" >> notes.txt - - # set new release notes - gh release edit "${product_version}" --notes-file notes.txt - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Propagate release - continue-on-error: true - if: ${{ inputs.propagate_project && inputs.is_nightly_run == false }} - shell: bash - run: | - gh workflow run "Propagate base project dependency to projects - On Dispatch" \ - --repo MOV-AI/qa-automations \ - -f repo_name=${GITHUB_REPOSITORY#*/} \ - -f repo_version=${{ steps.bump.outputs.version }} - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack - shell: bash - run: | - MESSAGE=":white_check_mark: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - - if [ "${{ inputs.is_nightly_run }}" == "true" ] ; then - MESSAGE=":white_check_mark: NIGHTLY: ${GITHUB_REPOSITORY} ${{inputs.nightly_run_branch}}, (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - fi - echo "msg=$MESSAGE" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ 
secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg }} - slack-update-message-ts: ${{ needs.Install-Simulator-Robot.outputs.slack_thread_id }} - - Run-Status: - runs-on: ubuntu-20.04 - needs: [publish, Validate-boostrap-configs] - if: ${{ always() && ( needs.publish.result == 'failure' || needs.publish.result == 'cancelled' || needs.publish.result == 'skipped' ) }} - steps: - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: Copy product configs - shell: bash - run: | - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - - - name: Prepare slack variables - id: pre_slack - shell: bash - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is unstable (or canceled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - - if [ "${{ inputs.is_nightly_run }}" == "true" ] ; then - MESSAGE_ERR=":x: NIGHTLY: ${GITHUB_REPOSITORY} ${{inputs.nightly_run_branch}}, (Attempt: #${{ github.run_attempt }}) is unstable (or canceled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - fi - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT + # - name: Teardown remote vms (Proxmox) + # working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} + # if: ${{ inputs.with_simulation_tests && inputs.debug_simulation_tests_keep_alive == false && always() }} + # shell: bash + # run: terraform destroy -auto-approve + # env: + # TF_VAR_number_agents: 0 + # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + # TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} + # TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} + # TF_VAR_provision_ssh_pem: ${{ 
secrets.ssh_priv_key }} + # TF_VAR_ip_main: "dhcp" + # TF_VAR_proxmox_host_list: '["mary"]' + # #TF_VAR_vm_gateway: "10.10.1.254" + # # TF_VAR_ip_mask: 23 + # TF_VAR_bios: "ovmf" + # TF_VAR_pool: "IP-Temp-VMs" + # TF_VAR_tags: '["ip-simul-ci"]' + # TF_VAR_fleet_hosts_user: "devops" + # # TF_VAR_template_name: "u22dci-gpu" + # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + # TF_VAR_fleet_manager_memory: 30000 + # TF_VAR_fleet_manager_cores: 10 + # TF_VAR_fleet_manager_disk_size: "110" + # TF_VAR_sleep: "20" + # TF_VAR_vm_core_type: "host" + # TF_VAR_vm_disk_interface: "scsi0" + # TF_VAR_vm_type: "q35" + # TF_VAR_vm_os_type: "l26" + # TF_VAR_vm_network_bridge: "vmbr0" + # TF_VAR_vm_disk_storage: "nas-mary" + # TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" + # TF_VAR_vm_bios: "ovmf" + # TF_VAR_cloud_init_storage: "local" + # TF_VAR_hostpci_device: '["hostpci0"]' + # TF_VAR_hostpci_device_id: '["0000:01:00.0"]' + # TF_VAR_hostpci_device_pcie: "true" + # TF_VAR_hostpci_device_xvga: "true" - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg_error }} - slack-update-message-ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} diff --git a/README.md b/README.md index 8794f57b..1f51d01a 100644 --- a/README.md +++ b/README.md @@ -3,4 +3,4 @@ ## - name: Publish to project data viewer This step publishes data to the [project data viewer website](https://personal-7vf0v2cu.outsystemscloud.com/ProjectDataViewer5/). The site is used to visualize which packages are installed in the spawner container in the projects. By default this step is disabled. To enable set the `use_project_data_viewer` parameter to true in the workflow file of your project. If failed the step is skipped. 
-The credentials to the site is given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer).
+The credentials to the site are given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer).
\ No newline at end of file