From 92cb811e639438bee4041e95bcd049559d2655cd Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 12:33:18 +0000 Subject: [PATCH 01/78] init bpg --- .../workflows/integration-build-product.yml | 66 +++++++++++++------ 1 file changed, 46 insertions(+), 20 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 01706faf..ae074cb8 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -100,8 +100,8 @@ env: MID_REGISTRY: registry.cloud.mov.ai USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace - PROVISION_INFRA_REPO: "devops-tf-proxmox-fleet" - PROVISION_INFRA_VERSION: "0.1.0-7" + PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" + PROVISION_INFRA_VERSION: "0.0.0-1" # slack channel movai-projects SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -1160,6 +1160,7 @@ jobs: rm -rf $provision_infra_dir export PATH="$HOME/.local/bin:$PATH" integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir + ls -la $provision_infra_dir echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT @@ -1191,24 +1192,49 @@ jobs: env: TF_VAR_number_agents: 0 TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' - TF_VAR_storage: "local-lvm" - TF_VAR_proxmox_host_list: '["mary"]' - TF_VAR_vm_gateway: "10.10.1.254" - TF_VAR_ip_mask: 23 - TF_VAR_bios: "ovmf" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "u22dci-gpu" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - TF_VAR_fleet_manager_memory: 51200 - TF_VAR_fleet_manager_cores: 14 - TF_VAR_fleet_manager_disk_size: "110G" - TF_VAR_fleet_manager_balloon: 0 + # TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} + # TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} + # TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} + # TF_VAR_ip_list: '["dhcp"]' + # TF_VAR_storage: "local-lvm" + # TF_VAR_proxmox_host_list: '["mary"]' + # TF_VAR_vm_gateway: "10.10.1.254" + # TF_VAR_ip_mask: 23 + # TF_VAR_bios: "ovmf" + # TF_VAR_pool: "IP-Temp-VMs" + # TF_VAR_tags: "ip-simul-ci" + # TF_VAR_fleet_hosts_user: "devops" + # TF_VAR_template_name: "u22dci-gpu" + # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + # TF_VAR_fleet_manager_memory: 51200 + # TF_VAR_fleet_manager_cores: 14 + # TF_VAR_fleet_manager_disk_size: "110G" + # TF_VAR_fleet_manager_balloon: 0 + TF_VAR_PROXMOX_VE_USERNAME: ${{ secrets.proxmox_ve_username }} + TF_VAR_PROXMOX_VE_PASSWORD: ${{ secrets.proxmox_ve_password }} + TF_VAR_ubuntu_name: "jammy" + TF_VAR_ubuntu_version: "22.04" + TF_VAR_proxmox_host: "mary" + TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + TF_VAR_vm_name: "test-bpg-tf" + TF_VAR_vm_memory: 8192 + TF_VAR_vm_cores: 4 + TF_VAR_vm_core_type: "host" + TF_VAR_vm_disk_size: 20 + TF_VAR_vm_disk_interface: "scsi0" + TF_VAR_vm_type: "q35" + TF_VAR_vm_os_type: "l26" + TF_VAR_vm_network_bridge: "vmbr0" + TF_VAR_vm_network_ip_address: "dhcp" + 
TF_VAR_vm_disk_storage: "nas-mary" + TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" + TF_VAR_vm_bios: "ovmf" + TF_VAR_cloud_init_storage: "local" + TF_VAR_hostpci_device: "hostpci0" + TF_VAR_hostpci_device_id: "0000:01:00.0" + TF_VAR_hostpci_device_pcie: "true" + TF_VAR_hostpci_device_xvga: "true" + TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs if: ${{ inputs.with_simulation_tests }} From cc47d13278ccf4785a7b0ad62a26c59267fc2127 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 14:07:53 +0000 Subject: [PATCH 02/78] test bpg-img deploy --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index ae074cb8..8635379e 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1191,7 +1191,7 @@ jobs: env: TF_VAR_number_agents: 0 - TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} # TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} # TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} # TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} From 22dfe238dcc41cc8bb974e261beda243aff691e3 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 14:47:17 +0000 Subject: [PATCH 03/78] test bpg-img deploy+ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 8635379e..ea0c3230 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1162,7 +1162,7 @@ jobs: integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir ls -la $provision_infra_dir - echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT + echo "target_dir=${provision_infra_dir}/" >> $GITHUB_OUTPUT - name: Define Instance names if: ${{ inputs.with_simulation_tests }} From 1a818fa20261afef343e825a87f1cde332850d3c Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 15:59:50 +0000 Subject: [PATCH 04/78] test bpg-img deploy++ --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8794f57b..8c618f1f 100644 --- a/README.md +++ b/README.md @@ -3,4 +3,4 @@ ## - name: Publish to project data viewer This step publishes data to the [project data viewer website](https://personal-7vf0v2cu.outsystemscloud.com/ProjectDataViewer5/). The site is used to visualize which packages are installed in the spawner container in the projects. By default this step is disabled. To enable set the `use_project_data_viewer` parameter to true in the workflow file of your project. If failed the step is skipped. -The credentials to the site is given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer). +The credentials to the site is given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer). 
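Note on the credential change running through these patches: the workflow is being moved off the token-based variables (proxmox_api_token_id / proxmox_api_token_secret) and onto username/password credentials for the bpg/proxmox Terraform provider, while the provisioning repo switches from devops-tf-proxmox-fleet to devops-tf-proxmox-bpg. The Terraform code itself lives in that repository and is not part of this diff, so the snippet below is only a minimal sketch, assuming devops-tf-proxmox-bpg declares input variables with the names used in the TF_VAR_* environment entries above. Two facts it relies on are standard behaviour rather than assumptions: Terraform maps any TF_VAR_<name> environment variable onto the input variable <name>, and the bpg/proxmox provider can also read PROXMOX_VE_ENDPOINT, PROXMOX_VE_USERNAME and PROXMOX_VE_PASSWORD directly from the environment (the route tried in later patches of this series).

    # Sketch only -- variable names are assumed to match what devops-tf-proxmox-bpg expects.
    terraform {
      required_providers {
        proxmox = {
          source = "bpg/proxmox"
        }
      }
    }

    variable "proxmox_api_url" {}        # populated by TF_VAR_proxmox_api_url
    variable "proxmox_ve_username" {}    # populated by TF_VAR_proxmox_ve_username, e.g. "user@pam"
    variable "proxmox_ve_password" {
      sensitive = true                   # populated by TF_VAR_proxmox_ve_password
    }

    provider "proxmox" {
      endpoint = var.proxmox_api_url
      username = var.proxmox_ve_username
      password = var.proxmox_ve_password
      # Alternatively, leave these arguments unset and export
      # PROXMOX_VE_USERNAME / PROXMOX_VE_PASSWORD before terraform runs;
      # the provider picks them up from the environment.
    }

With wiring along these lines, the unchanged `terraform init` / `terraform plan` / `terraform apply` steps in the workflow pick up the new credentials purely through the TF_VAR_* environment, which is why the later patches only touch the env: block and the secrets passed into it.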
From 6fef8117c164f1f728abf25fe116ab8ff2cba53a Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 16:59:38 +0000 Subject: [PATCH 05/78] test bpg-img deploy+++ --- .github/workflows/integration-build-product.yml | 4 ++-- README.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index ea0c3230..0bd58b96 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -75,9 +75,9 @@ on: required: true proxmox_api_url: required: true - proxmox_api_token_id: + proxmox_ve_username: required: true - proxmox_api_token_secret: + proxmox_ve_password: required: true ssh_priv_key: required: true diff --git a/README.md b/README.md index 8c618f1f..1f51d01a 100644 --- a/README.md +++ b/README.md @@ -3,4 +3,4 @@ ## - name: Publish to project data viewer This step publishes data to the [project data viewer website](https://personal-7vf0v2cu.outsystemscloud.com/ProjectDataViewer5/). The site is used to visualize which packages are installed in the spawner container in the projects. By default this step is disabled. To enable set the `use_project_data_viewer` parameter to true in the workflow file of your project. If failed the step is skipped. -The credentials to the site is given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer). +The credentials to the site is given in the [confluence page](https://movai.atlassian.net/wiki/spaces/MF/pages/2403074053/Project+Data+Viewer). \ No newline at end of file From 42819b0441ec051af12f65b9496baa8f2d27d2a4 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Fri, 8 Mar 2024 17:11:42 +0000 Subject: [PATCH 06/78] test bpg-img deploy++++ --- .github/workflows/integration-build-product.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 0bd58b96..6819f054 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1210,8 +1210,8 @@ jobs: # TF_VAR_fleet_manager_cores: 14 # TF_VAR_fleet_manager_disk_size: "110G" # TF_VAR_fleet_manager_balloon: 0 - TF_VAR_PROXMOX_VE_USERNAME: ${{ secrets.proxmox_ve_username }} - TF_VAR_PROXMOX_VE_PASSWORD: ${{ secrets.proxmox_ve_password }} + PROXMOX_VE_USERNAME: ${{ secrets.proxmox_ve_username }} + PROXMOX_VE_PASSWORD: ${{ secrets.proxmox_ve_password }} TF_VAR_ubuntu_name: "jammy" TF_VAR_ubuntu_version: "22.04" TF_VAR_proxmox_host: "mary" From b3086c39ef164a77a824958a39a868742f479d22 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 09:12:59 +0000 Subject: [PATCH 07/78] update vars --- .github/workflows/integration-build-product.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 6819f054..fcf57248 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1210,10 +1210,8 @@ jobs: # TF_VAR_fleet_manager_cores: 14 # TF_VAR_fleet_manager_disk_size: "110G" # TF_VAR_fleet_manager_balloon: 0 - PROXMOX_VE_USERNAME: ${{ secrets.proxmox_ve_username }} - PROXMOX_VE_PASSWORD: ${{ secrets.proxmox_ve_password }} - TF_VAR_ubuntu_name: "jammy" - TF_VAR_ubuntu_version: "22.04" + TF_VAR_proxmox_ve_username: ${{ 
secrets.proxmox_ve_username }} + TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} TF_VAR_proxmox_host: "mary" TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} TF_VAR_vm_name: "test-bpg-tf" From 8add44548df0e2fc88171fb353221eee323ea610 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 09:27:30 +0000 Subject: [PATCH 08/78] bump infra version --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index fcf57248..8c544747 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-1" + PROVISION_INFRA_VERSION: "0.0.0-2" # slack channel movai-projects SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel From c8e6245fcc1477170efc722d10e810899c4827de Mon Sep 17 00:00:00 2001 From: duartecoelhomovai <84720623+duartecoelhomovai@users.noreply.github.com> Date: Mon, 11 Mar 2024 09:40:22 +0000 Subject: [PATCH 09/78] switch to dev slack branch --- .github/workflows/integration-build-product.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 8c544747..1f353d54 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -103,9 +103,9 @@ env: PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" PROVISION_INFRA_VERSION: "0.0.0-2" # slack channel movai-projects - SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} + #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel - #SLACK_CHANNEL: "C05K2KF1UP8" + SLACK_CHANNEL: "C05K2KF1UP8" PROJECT_DATA_VIEWER_API: "https://personal-7vf0v2cu.outsystemscloud.com/ProjectDataViewer5/rest/V1//CreateProject" MINIO_S3_URL: "https://s3.mov.ai" From b5842ae98035937786b58ced9a6751dd04d79665 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 09:50:04 +0000 Subject: [PATCH 10/78] bump infra version --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 8c544747..0b94fde8 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-2" + PROVISION_INFRA_VERSION: "0.0.0-3" # slack channel movai-projects SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel From d06c25230ce834b8a67c6d0d123cdd34765ae800 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 09:59:15 +0000 Subject: [PATCH 11/78] test credentials for proxmox --- .github/workflows/integration-build-product.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 26578b34..bf017f91 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1182,6 +1182,8 @@ jobs: 
working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | + PROXMOX_VE_USERNAME=${{ secrets.proxmox_ve_username }} + PROXMOX_VE_PASSWORD=${{ secrets.proxmox_ve_password }} terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve From 237b0d96130048f8f69d5edf0e73db04fe4e960a Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 12:17:46 +0000 Subject: [PATCH 12/78] fix vars++ --- .github/workflows/integration-build-product.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index bf017f91..76fc4b2a 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-3" + PROVISION_INFRA_VERSION: "0.0.0-4" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -1182,8 +1182,8 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - PROXMOX_VE_USERNAME=${{ secrets.proxmox_ve_username }} - PROXMOX_VE_PASSWORD=${{ secrets.proxmox_ve_password }} + export PROXMOX_VE_USERNAME=${{ secrets.proxmox_ve_username }} + export PROXMOX_VE_PASSWORD=${{ secrets.proxmox_ve_password }} terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve From e99e769525fe559af16df9f98b311c580cfce04e Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 12:25:43 +0000 Subject: [PATCH 13/78] fix vars+++ --- .github/workflows/integration-build-product.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 76fc4b2a..6fd145d1 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1182,8 +1182,6 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - export PROXMOX_VE_USERNAME=${{ secrets.proxmox_ve_username }} - export PROXMOX_VE_PASSWORD=${{ secrets.proxmox_ve_password }} terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve From fe4a0893c8b7d9cbd16ba10c5a7f8f2aa501e7a8 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 15:14:07 +0000 Subject: [PATCH 14/78] fix vars++++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 6fd145d1..5dbf94e1 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1232,7 +1232,7 @@ jobs: TF_VAR_hostpci_device_id: "0000:01:00.0" TF_VAR_hostpci_device_pcie: "true" TF_VAR_hostpci_device_xvga: "true" - TF_VAR_hostpci_device_rombar: "true" + # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs if: ${{ inputs.with_simulation_tests }} From ecd4186e563bfcd352e15d2ff77b845e6c624459 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 
Mar 2024 15:21:01 +0000 Subject: [PATCH 15/78] fix vars+++++ --- .../workflows/integration-build-product.yml | 50 +++++++++++++------ 1 file changed, 35 insertions(+), 15 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 5dbf94e1..a11e265d 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1628,22 +1628,42 @@ jobs: env: TF_VAR_number_agents: 0 TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} + TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} + TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' - TF_VAR_proxmox_host_list: '["mary"]' - TF_VAR_vm_gateway: "10.10.1.254" - TF_VAR_ip_mask: 23 - TF_VAR_bios: "ovmf" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "u22dci-gpu" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - TF_VAR_fleet_manager_memory: 30000 - TF_VAR_fleet_manager_cores: 10 - TF_VAR_fleet_manager_disk_size: "110G" + # TF_VAR_ip_list: '["dhcp"]' + # TF_VAR_proxmox_host_list: '["mary"]' + # TF_VAR_vm_gateway: "10.10.1.254" + # TF_VAR_ip_mask: 23 + # TF_VAR_bios: "ovmf" + # TF_VAR_pool: "IP-Temp-VMs" + # TF_VAR_tags: "ip-simul-ci" + # TF_VAR_fleet_hosts_user: "devops" + # TF_VAR_template_name: "u22dci-gpu" + # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + # TF_VAR_fleet_manager_memory: 30000 + # TF_VAR_fleet_manager_cores: 10 + # TF_VAR_fleet_manager_disk_size: "110G" + # TF_VAR_proxmox_host: "mary" + TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + TF_VAR_vm_name: "test-bpg-tf" + TF_VAR_vm_memory: 8192 + TF_VAR_vm_cores: 4 + TF_VAR_vm_core_type: "host" + TF_VAR_vm_disk_size: 20 + TF_VAR_vm_disk_interface: "scsi0" + TF_VAR_vm_type: "q35" + TF_VAR_vm_os_type: "l26" + TF_VAR_vm_network_bridge: "vmbr0" + TF_VAR_vm_network_ip_address: "dhcp" + TF_VAR_vm_disk_storage: "nas-mary" + TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" + TF_VAR_vm_bios: "ovmf" + TF_VAR_cloud_init_storage: "local" + TF_VAR_hostpci_device: "hostpci0" + TF_VAR_hostpci_device_id: "0000:01:00.0" + TF_VAR_hostpci_device_pcie: "true" + TF_VAR_hostpci_device_xvga: "true" publish: needs: [Install-Robot, Install-Simulator-Robot, Validation-Simulator-Tests] From 90b463ee64cea0da05558b467cf5104c2b80810f Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Mon, 11 Mar 2024 15:25:10 +0000 Subject: [PATCH 16/78] fix teardown --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index a11e265d..18fa0cad 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1645,7 +1645,7 @@ jobs: # TF_VAR_fleet_manager_cores: 10 # TF_VAR_fleet_manager_disk_size: "110G" # TF_VAR_proxmox_host: "mary" - TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} TF_VAR_vm_name: "test-bpg-tf" TF_VAR_vm_memory: 8192 TF_VAR_vm_cores: 4 From 7a3bfae16cacfa65057e50756a129daf669f21c9 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov 
Date: Mon, 11 Mar 2024 15:37:55 +0000 Subject: [PATCH 17/78] test without rombar and conf ssh --- .../workflows/integration-build-product.yml | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 18fa0cad..246c46b7 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-4" + PROVISION_INFRA_VERSION: "0.0.0-5" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -1246,18 +1246,18 @@ jobs: echo "host_user=$(echo $user | sed "s;\";;g")" >> $GITHUB_OUTPUT echo "ssh_connect_string=$(echo $user | sed "s;\";;g")@$(echo $ip | sed "s;\";;g")" >> $GITHUB_OUTPUT - - name: Configure SSH - if: ${{ inputs.with_simulation_tests }} - id: remote_ssh_setup - shell: bash - run: | - ssh-keygen -f ~/.ssh/known_hosts -R ${{ steps.infra_outputs.outputs.host_ip }} || true - ssh-keyscan -H ${{ steps.infra_outputs.outputs.host_ip }} >> ~/.ssh/known_hosts || true - - ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' - set -e - cloud-init status --wait; rm -rf ./${{ env.REMOTE_WORKSPACE_PATH }}; mkdir -p ./${{ env.REMOTE_WORKSPACE_PATH }} - ' + # - name: Configure SSH + # if: ${{ inputs.with_simulation_tests }} + # id: remote_ssh_setup + # shell: bash + # run: | + # ssh-keygen -f ~/.ssh/known_hosts -R ${{ steps.infra_outputs.outputs.host_ip }} || true + # ssh-keyscan -H ${{ steps.infra_outputs.outputs.host_ip }} >> ~/.ssh/known_hosts || true + + # ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' + # set -e + # cloud-init status --wait; rm -rf ./${{ env.REMOTE_WORKSPACE_PATH }}; mkdir -p ./${{ env.REMOTE_WORKSPACE_PATH }} + # ' - name: Prepare Devops provisioning slack message From f4267c41d0b66538981775fd2aa5c7900eef1095 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Thu, 14 Mar 2024 21:27:24 +0000 Subject: [PATCH 18/78] test rewrite vars bpg --- .../workflows/integration-build-product.yml | 70 ++++++++----------- 1 file changed, 28 insertions(+), 42 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 246c46b7..ad1a7ec8 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-5" + PROVISION_INFRA_VERSION: "0.0.0-6" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -1191,32 +1191,24 @@ jobs: env: TF_VAR_number_agents: 0 - # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - # TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - # TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - # TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - # TF_VAR_ip_list: '["dhcp"]' - # TF_VAR_storage: "local-lvm" - # TF_VAR_proxmox_host_list: '["mary"]' - # TF_VAR_vm_gateway: "10.10.1.254" - # TF_VAR_ip_mask: 23 - # TF_VAR_bios: "ovmf" - # TF_VAR_pool: "IP-Temp-VMs" - # 
TF_VAR_tags: "ip-simul-ci" - # TF_VAR_fleet_hosts_user: "devops" - # TF_VAR_template_name: "u22dci-gpu" - # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - # TF_VAR_fleet_manager_memory: 51200 - # TF_VAR_fleet_manager_cores: 14 - # TF_VAR_fleet_manager_disk_size: "110G" - # TF_VAR_fleet_manager_balloon: 0 + TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} + TF_VAR_ip_list: '["dhcp"]' + TF_VAR_storage: "local-lvm" + TF_VAR_proxmox_host_list: '["mary"]' + #TF_VAR_vm_gateway: "10.10.1.254" + #TF_VAR_ip_mask: 23 + TF_VAR_bios: "ovmf" + TF_VAR_pool: "IP-Temp-VMs" + TF_VAR_tags: "ip-simul-ci" + TF_VAR_fleet_hosts_user: "devops" + TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + TF_VAR_fleet_manager_memory: 51200 + TF_VAR_fleet_manager_cores: 14 + TF_VAR_fleet_manager_disk_size: "110G" + TF_VAR_fleet_manager_balloon: 0 TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} - TF_VAR_proxmox_host: "mary" TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_vm_name: "test-bpg-tf" - TF_VAR_vm_memory: 8192 - TF_VAR_vm_cores: 4 TF_VAR_vm_core_type: "host" TF_VAR_vm_disk_size: 20 TF_VAR_vm_disk_interface: "scsi0" @@ -1631,31 +1623,25 @@ jobs: TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - # TF_VAR_ip_list: '["dhcp"]' - # TF_VAR_proxmox_host_list: '["mary"]' - # TF_VAR_vm_gateway: "10.10.1.254" + TF_VAR_ip_list: '["dhcp"]' + TF_VAR_proxmox_host_list: '["mary"]' + #TF_VAR_vm_gateway: "10.10.1.254" # TF_VAR_ip_mask: 23 - # TF_VAR_bios: "ovmf" - # TF_VAR_pool: "IP-Temp-VMs" - # TF_VAR_tags: "ip-simul-ci" - # TF_VAR_fleet_hosts_user: "devops" + TF_VAR_bios: "ovmf" + TF_VAR_pool: "IP-Temp-VMs" + TF_VAR_tags: "ip-simul-ci" + TF_VAR_fleet_hosts_user: "devops" # TF_VAR_template_name: "u22dci-gpu" - # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - # TF_VAR_fleet_manager_memory: 30000 - # TF_VAR_fleet_manager_cores: 10 - # TF_VAR_fleet_manager_disk_size: "110G" - # TF_VAR_proxmox_host: "mary" - # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_vm_name: "test-bpg-tf" - TF_VAR_vm_memory: 8192 - TF_VAR_vm_cores: 4 + TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + TF_VAR_fleet_manager_memory: 30000 + TF_VAR_fleet_manager_cores: 10 + TF_VAR_fleet_manager_disk_size: "110G" + TF_VAR_sleep: "50" TF_VAR_vm_core_type: "host" - TF_VAR_vm_disk_size: 20 TF_VAR_vm_disk_interface: "scsi0" TF_VAR_vm_type: "q35" TF_VAR_vm_os_type: "l26" TF_VAR_vm_network_bridge: "vmbr0" - TF_VAR_vm_network_ip_address: "dhcp" TF_VAR_vm_disk_storage: "nas-mary" TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" TF_VAR_vm_bios: "ovmf" From 9188792f6113a40d5ca507a38b83902ef4e49ab9 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 11:20:30 +0000 Subject: [PATCH 19/78] test deploy --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index ad1a7ec8..746acc1d 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - 
PROVISION_INFRA_VERSION: "0.0.0-6" + PROVISION_INFRA_VERSION: "0.0.0-7" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel From f9b20add532efcdbed3253d81c1141777d36e3c0 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 12:46:07 +0000 Subject: [PATCH 20/78] test deploy+ --- .github/workflows/integration-build-product.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 746acc1d..41de6bb8 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1199,12 +1199,12 @@ jobs: #TF_VAR_ip_mask: 23 TF_VAR_bios: "ovmf" TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" + TF_VAR_tags: '["ip-simul-ci"]' TF_VAR_fleet_hosts_user: "devops" TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} TF_VAR_fleet_manager_memory: 51200 TF_VAR_fleet_manager_cores: 14 - TF_VAR_fleet_manager_disk_size: "110G" + TF_VAR_fleet_manager_disk_size: "110" TF_VAR_fleet_manager_balloon: 0 TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} @@ -1220,10 +1220,11 @@ jobs: TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" TF_VAR_vm_bios: "ovmf" TF_VAR_cloud_init_storage: "local" - TF_VAR_hostpci_device: "hostpci0" - TF_VAR_hostpci_device_id: "0000:01:00.0" + TF_VAR_hostpci_device: '["hostpci0"]' + TF_VAR_hostpci_device_id: '["0000:01:00.0"]' TF_VAR_hostpci_device_pcie: "true" TF_VAR_hostpci_device_xvga: "true" + TF_VAR_sleep: "50" # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs From a7595dcb17d8d78fcff66a01f86c6c442fe01ce3 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 13:17:42 +0000 Subject: [PATCH 21/78] test deploy++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 41de6bb8..188786d0 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1224,7 +1224,7 @@ jobs: TF_VAR_hostpci_device_id: '["0000:01:00.0"]' TF_VAR_hostpci_device_pcie: "true" TF_VAR_hostpci_device_xvga: "true" - TF_VAR_sleep: "50" + TF_VAR_sleep: "70" # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs From 80f96b9762b4a3fdfe4c9e0a4602c3a3d0041740 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 13:47:49 +0000 Subject: [PATCH 22/78] test deploy++++ --- .../workflows/integration-build-product.yml | 920 +----------------- 1 file changed, 53 insertions(+), 867 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 188786d0..5328a73e 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -405,268 +405,6 @@ jobs: docker system prune -f docker image prune --all -f - Install-Robot: - needs: [Build-Spawner] - strategy: - matrix: - distro: ${{ fromJSON(inputs.ros_distro) }} - runs-on: integration-pipeline - - steps: - - uses: rtCamp/action-cleanup@master - - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - 
name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash manifest - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - - name: unstash robot_jsons_${{ matrix.distro }} - uses: actions/download-artifact@v3 - with: - name: robot_jsons_${{ matrix.distro }} - path: . - - - name: Login to ${{ env.REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Login to ${{ env.MID_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Docker load spawner image - shell: bash - run: | - docker pull "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.PUSH_REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - - - name: Installation - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME=${{ inputs.product_name }}-${{ matrix.distro }}.json - export PATH="$HOME/.local/bin:$PATH" - mkdir -p userspace/ - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - - execution_status=$? 
- exit $execution_status - rm movai_service_version - - - name: Collect Install logs - continue-on-error: true - run: | - # cleanup - rm -rf install_logs - - mkdir -p install_logs - journalctl -u movai-service -t mobros --since "1hour ago" > install_logs/spawner-firmware.log - journalctl -u movai-service --since "1hour ago" > install_logs/movai-service.log - - - name: Stash Install logs artifacts - continue-on-error: true - if: always() - uses: actions/upload-artifact@v3 - with: - name: install_logs - path: install_logs/* - retention-days: 5 - - - name: Run mobtest - shell: bash - run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==0.0.4.3 --ignore-installed - mobtest proj /opt/ros/noetic/share/ - ' - - - name: Collect Installed components - if: always() - shell: bash - run: | - container_id=$(docker ps -q -f "ancestor=$REGISTRY/qa/${{ inputs.product_name }}-${{ matrix.distro }}:$(cat product.version)") - docker exec -t "$container_id" bash -c ' - set -e - sudo apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} --ignore-installed - package-deployer scan - package-deployer scanAll - ls -la /tmp - - { - echo 'Annotation:*' - echo 'Callback:*' - echo 'Configuration:*' - echo 'Flow:*' - echo 'GraphicScene:*' - echo 'Layout:*' - echo 'Node:*' - echo 'Package:*' - } >> /tmp/manifest.txt - mkdir /tmp/proj_metadata - python3 -m tools.backup -p /tmp/proj_metadata/ -m /tmp/manifest.txt -a export -i - - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-3rdParty.dploy - docker cp $container_id:/tmp/apt_packages.json artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-apt_packages.json - - mkdir -p metadata_artifact tmp_meta - docker cp $container_id:/tmp/proj_metadata/ ./ - tar cvzf ./metadata_artifact/metadata.tar.gz ./proj_metadata - CONFIG_FILE_NAME=${{ inputs.product_name }}-${{ matrix.distro }}.json - - echo "$PUSH_REGISTRY/qa/${{ inputs.product_name }}-${{ matrix.distro }}:$(cat product.version)">artifacts/product-${{ matrix.distro }}.image.artifact - - - name: Un stash dependency_version - if: ${{ inputs.propagate_project == false }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: dependency_version - - - name: Get project and solution version - id: project_and_solution_version - if: ${{ inputs.use_project_data_viewer == true && inputs.propagate_project == false }} - continue-on-error: true - shell: bash - run: | - PROJECT_VERSION=$(cat product.version) - echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_OUTPUT - MOVAI_SOLUTION_VERSION=$(cat dependency_version/base_version) - echo "MOVAI_SOLUTION_VERSION=$MOVAI_SOLUTION_VERSION" >> $GITHUB_OUTPUT - - - name: Publish to project data viewer - id: publish_to_pdv - if: ${{ inputs.use_project_data_viewer == true && inputs.propagate_project == false }} - continue-on-error: true - shell: bash - run: | - curl --location '${{ env.PROJECT_DATA_VIEWER_API}}?Name=${{ 
inputs.product_name }}-${{ matrix.distro }}&Version=${{ steps.project_and_solution_version.outputs.PROJECT_VERSION }}&SolutionVersion=${{ steps.project_and_solution_version.outputs.MOVAI_SOLUTION_VERSION }}' \ - --header 'Content-Type: application/json' \ - --header 'Authorization: Basic ${{ secrets.pdv_auth_token }}' \ - --data @artifacts/${{ inputs.product_name }}-${{ matrix.distro }}-apt_packages.json - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - - - name: Stash project metadata - - if: always() - uses: actions/upload-artifact@v3 - with: - name: project_metadata - path: metadata_artifact/* - retention-days: 3 - - - name: Stash deploy_artifacts_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - Build-Simulator: needs: [Validate-boostrap-configs, Build-Spawner] runs-on: integration-pipeline @@ -770,250 +508,32 @@ jobs: with: username: ${{ secrets.registry_user }} password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Prepare docker build variables - if: ${{ inputs.with_simulation == 'true' }} - id: pre_build - run: | - push_name_tmp=$(echo "${{ steps.pre_simulator_build.outputs.image_name }}" | sed "s-${{ env.REGISTRY }}-${{ env.MID_REGISTRY }}-g") - echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT - echo "push_name=$push_name_tmp" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Docker build - if: ${{ 
steps.pre_simulator_build.outputs.skip_simulator_build == 'false' && inputs.with_simulation == 'true'}} - shell: bash - run: | - docker build --add-host ${{ env.REGISTRY }}:172.22.0.106 \ - --build-arg BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} \ - --build-arg CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} \ - --file docker/${{ env.DISTRO }}/Dockerfile-simulator \ - --platform linux/amd64 \ - --tag ${{ steps.pre_build.outputs.push_name }} \ - --pull \ - --push . - - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - - name: Setup simulation Tests - id: post_simulator_build - shell: bash - run: | - if [ "${{ inputs.with_simulation_tests }}" = "true" ]; - then - echo "simul_tests_infra=simul-mary-queuer" >> $GITHUB_OUTPUT - else - echo "simul_tests_infra=integration-pipeline" >> $GITHUB_OUTPUT - fi - - Install-Simulator-Robot: - needs: [Build-Spawner, Build-Simulator] - strategy: - matrix: - distro: ${{ fromJSON(inputs.ros_distro) }} - runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Build-Spawner.outputs.slack_thread_id }} - skip_simulator: ${{ needs.Build-Simulator.outputs.skip_simulator }} - steps: - - uses: rtCamp/action-cleanup@master - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - - - name: Checkout - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/checkout@v3 - - - name: Agent info - if: ${{ inputs.with_simulation == 'true' }} - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - if: ${{ 
needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash manifest - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - - name: unstash robot_jsons_${{ matrix.distro }} - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: actions/download-artifact@v3 - with: - name: robot_jsons_${{ matrix.distro }} - path: . - - - name: Login to ${{ env.REGISTRY }} Registry - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Login to ${{ env.MID_REGISTRY }} Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Docker load spawner image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - docker pull "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - docker tag "${{ env.MID_REGISTRY }}/ci/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" "${{ env.PUSH_REGISTRY }}/qa/${{ inputs.product_name }}-${{ matrix.distro }}:${{ needs.Build-Spawner.outputs.raised_version }}" - - - name: Docker load simulator image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - promoted_name=$(echo "${{ needs.Build-Simulator.outputs.image_name }}" | sed "s-/ci/-/qa/-g" | sed "s-${{ env.MID_REGISTRY }}-${{ env.REGISTRY }}-g") - - docker pull "${{ needs.Build-Simulator.outputs.image_name }}" - docker tag "${{ needs.Build-Simulator.outputs.image_name }}" $promoted_name - - - - name: Installation - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="standalone-${{ inputs.product_name }}-simulator-${{ matrix.distro }}.json" - mkdir -p userspace/models_database/ userspace/tugbot_ignition/ - - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - export PUBLIC_IP=$(hostname -I | awk '{print $1}') - - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - 
integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - - execution_status=$? - exit $execution_status - rm movai_service_version - env: - SIMULATION_ID: "CI" - - - name: Collect Install logs - continue-on-error: true - run: | - # cleanup - rm -rf install_logs - - mkdir -p install_logs - journalctl -u movai-service -t mobros --since "1hour ago" > install_logs/spawner-firmware.log - journalctl -u movai-service --since "1hour ago" > install_logs/movai-service.log - - - name: Stash Install simulator logs artifacts - continue-on-error: true - if: always() - uses: actions/upload-artifact@v3 - with: - name: install_simulator_logs - path: install_logs/* - retention-days: 5 + registry: ${{ env.MID_REGISTRY }} - - name: Run mobtest - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - shell: bash + - name: Prepare docker build variables + if: ${{ inputs.with_simulation == 'true' }} + id: pre_build run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==${{ env.MOBTEST_VERSION }} --ignore-installed - mobtest proj /opt/ros/${{ matrix.distro }}/share/ - ' + push_name_tmp=$(echo "${{ steps.pre_simulator_build.outputs.image_name }}" | sed "s-${{ env.REGISTRY }}-${{ env.MID_REGISTRY }}-g") + echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT + echo "push_name=$push_name_tmp" >> $GITHUB_OUTPUT + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 - - name: Output simulator image - if: ${{ needs.Build-Simulator.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' }} - id: promote + - name: Docker build + if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' && inputs.with_simulation == 'true'}} shell: bash run: | - sim_img_name="$(echo "${{ needs.Build-Simulator.outputs.image_name }}" | sed "s-/ci/-/qa/-g")" - push_name=$(echo "$sim_img_name" | sed "s-${{ env.REGISTRY }}-${{ env.PUSH_REGISTRY }}-g") + docker build --add-host ${{ env.REGISTRY }}:172.22.0.106 \ + --build-arg BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} \ + --build-arg CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} \ + --file docker/${{ env.DISTRO }}/Dockerfile-simulator \ + --platform linux/amd64 \ + --tag ${{ steps.pre_build.outputs.push_name }} \ + --pull \ + --push . 
- echo "image_name=$push_name" >> $GITHUB_OUTPUT - name: Get current job id if: always() @@ -1045,7 +565,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Slack message failure uses: archive/github-actions-slack@master @@ -1055,31 +575,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - name: pre-stash - if: ${{ inputs.with_simulation == 'true' }} - shell: bash - run: | - echo "${{ steps.promote.outputs.image_name }}" > simulator.image.artifact - - - name: Stash deploy_simulator_artifacts - if: ${{ inputs.with_simulation == 'true' }} - uses: actions/upload-artifact@v3 - with: - name: deploy_simulator_artifacts - path: simulator.image.artifact - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Docker cleanups if: always() @@ -1088,6 +584,17 @@ jobs: docker system prune -f docker image prune --all -f + - name: Setup simulation Tests + id: post_simulator_build + shell: bash + run: | + if [ "${{ inputs.with_simulation_tests }}" = "true" ]; + then + echo "simul_tests_infra=simul-mary-queuer" >> $GITHUB_OUTPUT + else + echo "simul_tests_infra=integration-pipeline" >> $GITHUB_OUTPUT + fi + Validation-Simulator-Tests: needs: [Build-Spawner, Build-Simulator] runs-on: ${{ needs.Build-Simulator.outputs.simulator_tests_agent_name }} @@ -1185,6 +692,14 @@ jobs: terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve + terraform refresh + ip=$(terraform output manager_ip_address) + if [ -z "$ip" ]; then + echo "Error: IP address not found." 
+ exit 1 + else + echo "IP address found: $ip" + fi echo "${{ secrets.ssh_priv_key }}" > ~/.ssh/ci_priv_key.pem sudo chmod 600 ~/.ssh/ci_priv_key.pem @@ -1239,18 +754,18 @@ jobs: echo "host_user=$(echo $user | sed "s;\";;g")" >> $GITHUB_OUTPUT echo "ssh_connect_string=$(echo $user | sed "s;\";;g")@$(echo $ip | sed "s;\";;g")" >> $GITHUB_OUTPUT - # - name: Configure SSH - # if: ${{ inputs.with_simulation_tests }} - # id: remote_ssh_setup - # shell: bash - # run: | - # ssh-keygen -f ~/.ssh/known_hosts -R ${{ steps.infra_outputs.outputs.host_ip }} || true - # ssh-keyscan -H ${{ steps.infra_outputs.outputs.host_ip }} >> ~/.ssh/known_hosts || true + - name: Configure SSH + if: ${{ inputs.with_simulation_tests }} + id: remote_ssh_setup + shell: bash + run: | + ssh-keygen -f ~/.ssh/known_hosts -R ${{ steps.infra_outputs.outputs.host_ip }} || true + ssh-keyscan -H ${{ steps.infra_outputs.outputs.host_ip }} >> ~/.ssh/known_hosts || true - # ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' - # set -e - # cloud-init status --wait; rm -rf ./${{ env.REMOTE_WORKSPACE_PATH }}; mkdir -p ./${{ env.REMOTE_WORKSPACE_PATH }} - # ' + ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' + set -e + cloud-init status --wait; rm -rf ./${{ env.REMOTE_WORKSPACE_PATH }}; mkdir -p ./${{ env.REMOTE_WORKSPACE_PATH }} + ' - name: Prepare Devops provisioning slack message @@ -1650,333 +1165,4 @@ jobs: TF_VAR_hostpci_device: "hostpci0" TF_VAR_hostpci_device_id: "0000:01:00.0" TF_VAR_hostpci_device_pcie: "true" - TF_VAR_hostpci_device_xvga: "true" - - publish: - needs: [Install-Robot, Install-Simulator-Robot, Validation-Simulator-Tests] - runs-on: integration-pipeline - container: - image: registry.aws.cloud.mov.ai/qa/py-buildserver:v2.0.1 - credentials: - username: ${{secrets.registry_user}} - password: ${{secrets.registry_password}} - steps: - - uses: rtCamp/action-cleanup@master - if: ${{ inputs.is_nightly_run == false }} - - - name: Checkout - uses: actions/checkout@v3 - if: ${{ inputs.is_nightly_run == false }} - - - name: Agent info - if: ${{ inputs.is_nightly_run == false }} - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: unstash deploy_artifacts_noetic - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts - - - name: unstash manifest - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: manifest - path: . - - name: unstash sim_configs - if: ${{ inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - name: unstash project metadata - if: ${{ inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: project_metadata - path: . - - - name: unstash deploy_simulator_artifacts - if: ${{ inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - uses: actions/download-artifact@v3 - with: - name: deploy_simulator_artifacts - path: . 
- - - name: Install CI Scripts - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: | - python3 -m venv ci_scripts - source ci_scripts/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - deactivate - - - name: Install Package Deployer - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: Login to ${{ env.MID_REGISTRY }} Registry - if: ${{ inputs.is_nightly_run == false }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.MID_REGISTRY }} - - - name: Login to ${{ env.PUSH_REGISTRY }} Registry - if: ${{ inputs.is_nightly_run == false }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.PUSH_REGISTRY }} - - - name: Publish simulator state - if: ${{ needs.Install-Simulator-Robot.outputs.skip_simulator == 'false' && inputs.with_simulation == 'true' && inputs.is_nightly_run == false }} - shell: bash - run: | - source ci_scripts/bin/activate - cd simulator_artifacts - integration-pipeline publish_simulator_state_artifacts \ - --product_name ${{ inputs.product_name }} \ - --branch ${GITHUB_REF#refs/heads/} - deactivate - - name: Publish and create release - if: ${{ inputs.is_nightly_run == false }} - id: bump - shell: bash - run: | - git config --global --add safe.directory $(pwd) - git config --global user.name '${{ secrets.auto_commit_user }}' - git config --global user.email '${{ secrets.auto_commit_mail }}' - git config --global user.password ${{ secrets.auto_commit_pwd }} - - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - mkdir -p deployment_artifacts - - source ci_scripts/bin/activate - ls -la - mkdir -p pkgs_deployable - cp artifacts/*deployable.dploy pkgs_deployable - package-deployer join --dploy_workspace "$(pwd)/pkgs_deployable" - mv "$(pwd)/pkgs_deployable/merged.dploy" deployment_artifacts/deployable.dploy - - mkdir -p pkgs_undeployable - cp artifacts/*3rdParty.dploy pkgs_undeployable - package-deployer join --dploy_workspace "$(pwd)/pkgs_undeployable" - mv "$(pwd)/pkgs_undeployable/merged.dploy" deployment_artifacts/3rdParty.dploy - - rm -rf pkgs_deployable pkgs_undeployable - - cp artifacts/*.json deployment_artifacts - if [ "${{ inputs.with_simulation }}" = "true" ]; - then - echo -e "$(cat ./artifacts/product-noetic.image.artifact)\n$(cat ./simulator.image.artifact)" > deployment_artifacts/product.image.artifact - else - cp ./artifacts/product-noetic.image.artifact deployment_artifacts/product.image.artifact - fi - #cp ./artifacts/product-noetic.image.artifact deployment_artifacts/product.image.artifact - cp deployment_artifacts/product.image.artifact ./ - - SAVEIFS=$IFS - IFS=$'\n' - images=($(cat product.image.artifact)) - IFS=$SAVEIFS # Restore original IFS - - for image in "${images[@]}" - do - source=$(echo $image | sed "s-/qa/-/ci/-g" | sed "s-${{ env.PUSH_REGISTRY }}-${{ env.MID_REGISTRY }}-g" ) - target=$(echo $image | sed "s-${{ env.MID_REGISTRY }}-${{ env.PUSH_REGISTRY }}-g" ) - docker pull $source - echo "tagging $source as $target" - docker tag $source $target - docker push $target - done - - integration-pipeline patch_manifest_with_spawner 
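              # The loop above promotes each image listed in product.image.artifact: it is
              # pulled from the ci namespace on MID_REGISTRY, retagged for PUSH_REGISTRY and
              # pushed there. patch_manifest_with_spawner presumably rewrites
              # product-manifest.yaml with the promoted spawner image reference before the
              # manifest is copied into deployment_artifacts.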
- cat product-manifest.yaml - cp product-manifest.yaml deployment_artifacts - - cp metadata.tar.gz deployment_artifacts - - product_version=$(cat product.version) - # danger zone. Everything will be deleted. - git restore product.version - git restore product-manifest.yaml - git pull - echo "$product_version" > product.version - - git add product.version - git commit -m "[skip actions] Automatic Raise" - - echo "version=${product_version}" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare raise variables - if: ${{ inputs.is_nightly_run == false }} - id: pre_raise - run: | - #echo ::set-output name=branch::${GITHUB_REF#refs/heads/} - echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT - - - name: Raise App version - if: ${{ inputs.is_nightly_run == false }} - uses: CasperWA/push-protected@v2.14.0 - with: - token: ${{ secrets.auto_commit_pwd }} - branch: ${{ steps.pre_raise.outputs.branch }} - unprotect_reviews: true - - - name: Github Publish - if: ${{ inputs.is_nightly_run == false }} - shell: bash - run: | - commit_hash=$(git log --format="%H" -n 1) - product_version=$(cat product.version) - gh release create -p --generate-notes --target "$commit_hash" -t "${{ inputs.product_name }} $product_version" $product_version - # add all files in the deployment_artifacts folder - find deployment_artifacts -type f -exec gh release upload $product_version {} \; - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Update release notes - shell: bash - if: ${{ inputs.is_nightly_run == false }} - run: | - # release version - product_version=$(cat product.version) - - # get existent release body - ORIGINAL_RN=$(gh release view "${product_version}" --json body | jq -r .body) - echo -e "ORIGINAL_RN:\n ${ORIGINAL_RN}" - - # get release PRs - PRS=$(echo "${ORIGINAL_RN}" | sed -rn "s/.* by @.* in https:\/\/github\.com\/${{ github.repository_owner }}\/${{ github.event.repository.name }}\/pull\/([0-9]+).*/\1/p" | tr '\n' ' ') - # change to array - PRS=($PRS) - echo "Found the following PRs: ${PRS[@]}" - - # new release notes file - rm -rf notes.txt - - # What's Changed - with info from PRs - echo "## What's Changed" >> notes.txt - - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No relevant changes." >> notes.txt - else - # PRs exist - for pr in "${PRS[@]}"; do - gh pr view "${pr}" --json body | jq -r .body >> notes.txt - done - fi - echo "" >> notes.txt - - # PRs - echo "## PRs" >> notes.txt - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No PRs." 
>> notes.txt - else - # PRs exist - echo "${ORIGINAL_RN}" | grep "\* .* by @.* in https://github.com/${{ github.repository_owner }}/" >> notes.txt - fi - echo "" >> notes.txt - - ## Diff - echo "## Diff" >> notes.txt - echo "${ORIGINAL_RN}" | grep "\*\*Full Changelog\*\*" >> notes.txt - - # set new release notes - gh release edit "${product_version}" --notes-file notes.txt - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Propagate release - continue-on-error: true - if: ${{ inputs.propagate_project && inputs.is_nightly_run == false }} - shell: bash - run: | - gh workflow run "Propagate base project dependency to projects - On Dispatch" \ - --repo MOV-AI/qa-automations \ - -f repo_name=${GITHUB_REPOSITORY#*/} \ - -f repo_version=${{ steps.bump.outputs.version }} - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack - shell: bash - run: | - MESSAGE=":white_check_mark: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - - if [ "${{ inputs.is_nightly_run }}" == "true" ] ; then - MESSAGE=":white_check_mark: NIGHTLY: ${GITHUB_REPOSITORY} ${{inputs.nightly_run_branch}}, (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - fi - echo "msg=$MESSAGE" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg }} - slack-update-message-ts: ${{ needs.Install-Simulator-Robot.outputs.slack_thread_id }} - - Run-Status: - runs-on: ubuntu-20.04 - needs: [publish, Validate-boostrap-configs] - if: ${{ always() && ( needs.publish.result == 'failure' || needs.publish.result == 'cancelled' || needs.publish.result == 'skipped' ) }} - steps: - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: Copy product configs - shell: bash - run: | - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - - - name: Prepare slack variables - id: pre_slack - shell: bash - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is unstable (or canceled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - - if [ "${{ inputs.is_nightly_run }}" == "true" ] ; then - MESSAGE_ERR=":x: NIGHTLY: ${GITHUB_REPOSITORY} ${{inputs.nightly_run_branch}}, (Attempt: #${{ github.run_attempt }}) is unstable (or canceled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - fi - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg_error }} - slack-update-message-ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} + TF_VAR_hostpci_device_xvga: "true" \ No newline at end of file From 
ada51850ae5a6619d31e774722506d69d3a73469 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 16:31:51 +0000 Subject: [PATCH 23/78] test deploy+++++ --- .../workflows/integration-build-product.yml | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 5328a73e..d5179176 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-7" + PROVISION_INFRA_VERSION: "0.0.0-8" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -692,10 +692,10 @@ jobs: terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve - terraform refresh ip=$(terraform output manager_ip_address) if [ -z "$ip" ]; then echo "Error: IP address not found." + terraform exit 1 else echo "IP address found: $ip" @@ -707,7 +707,7 @@ jobs: env: TF_VAR_number_agents: 0 TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' + TF_VAR_ip_main: "dhcp" TF_VAR_storage: "local-lvm" TF_VAR_proxmox_host_list: '["mary"]' #TF_VAR_vm_gateway: "10.10.1.254" @@ -725,7 +725,7 @@ jobs: TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} TF_VAR_vm_core_type: "host" - TF_VAR_vm_disk_size: 20 + TF_VAR_vm_disk_size: 110 TF_VAR_vm_disk_interface: "scsi0" TF_VAR_vm_type: "q35" TF_VAR_vm_os_type: "l26" @@ -739,7 +739,7 @@ jobs: TF_VAR_hostpci_device_id: '["0000:01:00.0"]' TF_VAR_hostpci_device_pcie: "true" TF_VAR_hostpci_device_xvga: "true" - TF_VAR_sleep: "70" + TF_VAR_sleep: "30" # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs @@ -1139,7 +1139,7 @@ jobs: TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_list: '["dhcp"]' + TF_VAR_ip_main: "dhcp" TF_VAR_proxmox_host_list: '["mary"]' #TF_VAR_vm_gateway: "10.10.1.254" # TF_VAR_ip_mask: 23 @@ -1151,8 +1151,8 @@ jobs: TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} TF_VAR_fleet_manager_memory: 30000 TF_VAR_fleet_manager_cores: 10 - TF_VAR_fleet_manager_disk_size: "110G" - TF_VAR_sleep: "50" + TF_VAR_fleet_manager_disk_size: "110" + TF_VAR_sleep: "20" TF_VAR_vm_core_type: "host" TF_VAR_vm_disk_interface: "scsi0" TF_VAR_vm_type: "q35" @@ -1162,7 +1162,8 @@ jobs: TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" TF_VAR_vm_bios: "ovmf" TF_VAR_cloud_init_storage: "local" - TF_VAR_hostpci_device: "hostpci0" - TF_VAR_hostpci_device_id: "0000:01:00.0" + TF_VAR_hostpci_device: '["hostpci0"]' + TF_VAR_hostpci_device_id: '["0000:01:00.0"]' TF_VAR_hostpci_device_pcie: "true" - TF_VAR_hostpci_device_xvga: "true" \ No newline at end of file + TF_VAR_hostpci_device_xvga: "true" + From 17b510967a0866a9150c4581c071e193ec1eb7c7 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 16:55:05 +0000 Subject: [PATCH 24/78] test deploy++++++ --- .github/workflows/integration-build-product.yml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git 
a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index d5179176..fc308838 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -692,14 +692,7 @@ jobs: terraform init -backend-config="key=mary-standalone-${{ steps.infra_names.outputs.simul_prefix }}.tfstate" terraform plan terraform apply -auto-approve - ip=$(terraform output manager_ip_address) - if [ -z "$ip" ]; then - echo "Error: IP address not found." - terraform - exit 1 - else - echo "IP address found: $ip" - fi + terraform refresh echo "${{ secrets.ssh_priv_key }}" > ~/.ssh/ci_priv_key.pem sudo chmod 600 ~/.ssh/ci_priv_key.pem From b54950cd0d555fe10f044bc29f8aba22ce6312a1 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 16:58:04 +0000 Subject: [PATCH 25/78] test deploy+++++++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index fc308838..981706d4 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -732,7 +732,7 @@ jobs: TF_VAR_hostpci_device_id: '["0000:01:00.0"]' TF_VAR_hostpci_device_pcie: "true" TF_VAR_hostpci_device_xvga: "true" - TF_VAR_sleep: "30" + TF_VAR_sleep: "40" # TF_VAR_hostpci_device_rombar: "true" - name: Gather Terraform outputs From e8a5d4556013d50532b93be7601fb8e212f3743e Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 17:39:41 +0000 Subject: [PATCH 26/78] test deploy++++++++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 981706d4..22640392 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-8" + PROVISION_INFRA_VERSION: "0.0.0-9" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel From cf72e18e88c117a4e53ef4d42e0612d5421dc4d8 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 17:56:14 +0000 Subject: [PATCH 27/78] test deploy+++++++++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 22640392..62178d4f 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1138,7 +1138,7 @@ jobs: # TF_VAR_ip_mask: 23 TF_VAR_bios: "ovmf" TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-simul-ci" + TF_VAR_tags: '["ip-simul-ci"]' TF_VAR_fleet_hosts_user: "devops" # TF_VAR_template_name: "u22dci-gpu" TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} From 9cc09cd1c9cbe95ddf4fc52eb1a53a0d343e71c3 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Tue, 19 Mar 2024 19:00:12 +0000 Subject: [PATCH 28/78] update vga --- .github/workflows/integration-build-product.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml 
b/.github/workflows/integration-build-product.yml index 62178d4f..0d95f019 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -101,7 +101,7 @@ env: USERSPACE_FOLDER_PATH: userspace REMOTE_WORKSPACE_PATH: workspace PROVISION_INFRA_REPO: "devops-tf-proxmox-bpg" - PROVISION_INFRA_VERSION: "0.0.0-9" + PROVISION_INFRA_VERSION: "0.0.0-10" # slack channel movai-projects #SLACK_CHANNEL: ${{ inputs.overwrite_slack_channel }} # development slack channel @@ -922,7 +922,8 @@ jobs: run: | ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' set -e - + wget https://download.nomachine.com/download/8.11/Linux/nomachine_8.11.3_4_amd64.deb + sudo dpkg -i nomachine_8.11.3_4_amd64.deb export DISPLAY="$(w -oush | grep -Eo " :[0-9]+" | uniq | cut -d \ -f 2)" echo "Display detected (dinamic) is $DISPLAY" export DISPLAY=":0" From 95b8045b74f27e20218941e43fe39938015dfe1c Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 10:20:35 +0000 Subject: [PATCH 29/78] debug quikstart --- .../workflows/integration-build-product.yml | 76 +++++++++---------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 0d95f019..17aa6c08 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -1122,42 +1122,42 @@ jobs: AWS_DEFAULT_OUTPUT: "none" - - name: Teardown remote vms (Proxmox) - working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - if: ${{ inputs.with_simulation_tests && inputs.debug_simulation_tests_keep_alive == false && always() }} - shell: bash - run: terraform destroy -auto-approve - env: - TF_VAR_number_agents: 0 - TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} - TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} - TF_VAR_proxmox_ve_password: ${{ secrets.proxmox_ve_password }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} - TF_VAR_ip_main: "dhcp" - TF_VAR_proxmox_host_list: '["mary"]' - #TF_VAR_vm_gateway: "10.10.1.254" - # TF_VAR_ip_mask: 23 - TF_VAR_bios: "ovmf" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: '["ip-simul-ci"]' - TF_VAR_fleet_hosts_user: "devops" - # TF_VAR_template_name: "u22dci-gpu" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} - TF_VAR_fleet_manager_memory: 30000 - TF_VAR_fleet_manager_cores: 10 - TF_VAR_fleet_manager_disk_size: "110" - TF_VAR_sleep: "20" - TF_VAR_vm_core_type: "host" - TF_VAR_vm_disk_interface: "scsi0" - TF_VAR_vm_type: "q35" - TF_VAR_vm_os_type: "l26" - TF_VAR_vm_network_bridge: "vmbr0" - TF_VAR_vm_disk_storage: "nas-mary" - TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" - TF_VAR_vm_bios: "ovmf" - TF_VAR_cloud_init_storage: "local" - TF_VAR_hostpci_device: '["hostpci0"]' - TF_VAR_hostpci_device_id: '["0000:01:00.0"]' - TF_VAR_hostpci_device_pcie: "true" - TF_VAR_hostpci_device_xvga: "true" + # - name: Teardown remote vms (Proxmox) + # working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} + # if: ${{ inputs.with_simulation_tests && inputs.debug_simulation_tests_keep_alive == false && always() }} + # shell: bash + # run: terraform destroy -auto-approve + # env: + # TF_VAR_number_agents: 0 + # TF_VAR_proxmox_api_url: ${{ secrets.proxmox_api_url }} + # TF_VAR_proxmox_ve_username: ${{ secrets.proxmox_ve_username }} + # TF_VAR_proxmox_ve_password: ${{ 
secrets.proxmox_ve_password }} + # TF_VAR_provision_ssh_pem: ${{ secrets.ssh_priv_key }} + # TF_VAR_ip_main: "dhcp" + # TF_VAR_proxmox_host_list: '["mary"]' + # #TF_VAR_vm_gateway: "10.10.1.254" + # # TF_VAR_ip_mask: 23 + # TF_VAR_bios: "ovmf" + # TF_VAR_pool: "IP-Temp-VMs" + # TF_VAR_tags: '["ip-simul-ci"]' + # TF_VAR_fleet_hosts_user: "devops" + # # TF_VAR_template_name: "u22dci-gpu" + # TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.simul_prefix }} + # TF_VAR_fleet_manager_memory: 30000 + # TF_VAR_fleet_manager_cores: 10 + # TF_VAR_fleet_manager_disk_size: "110" + # TF_VAR_sleep: "20" + # TF_VAR_vm_core_type: "host" + # TF_VAR_vm_disk_interface: "scsi0" + # TF_VAR_vm_type: "q35" + # TF_VAR_vm_os_type: "l26" + # TF_VAR_vm_network_bridge: "vmbr0" + # TF_VAR_vm_disk_storage: "nas-mary" + # TF_VAR_vm_img_id: "nas-mary:iso/0.0.1-13-desktop-jammy_local.img" + # TF_VAR_vm_bios: "ovmf" + # TF_VAR_cloud_init_storage: "local" + # TF_VAR_hostpci_device: '["hostpci0"]' + # TF_VAR_hostpci_device_id: '["0000:01:00.0"]' + # TF_VAR_hostpci_device_pcie: "true" + # TF_VAR_hostpci_device_xvga: "true" From 4a6e87473e359b52fc53a79b1681b2550a8b0555 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 10:24:31 +0000 Subject: [PATCH 30/78] debug quikstart+ --- .github/workflows/integration-build-product.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 17aa6c08..609e8af7 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -948,6 +948,7 @@ jobs: export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" rm -rf userspace + mkdir userspace wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash chmod +x ./QuickStart_$(cat quickstart_version).bash From c77270d8fbfe6df8365f6b3e57f7389160653df5 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 16:01:02 +0000 Subject: [PATCH 31/78] fix quikstart --- .github/workflows/integration-build-product.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 609e8af7..bf4b51ad 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -944,11 +944,13 @@ jobs: integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" + export USERSPACE_FOLDER_PATH="/opt/movai/robots/userspace" export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" - rm -rf userspace - mkdir userspace + rm -rf $USERSPACE_FOLDER_PATH + mkdir -P $USERSPACE_FOLDER_PATH + sudo chmod 777 $USERSPACE_FOLDER_PATH + sudo chmod -R 777 /opt/movai wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash chmod +x ./QuickStart_$(cat quickstart_version).bash From a37c13f55bc43acf884dfb03a02db5c380f780cf Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 16:03:16 +0000 Subject: [PATCH 32/78] fix quikstart+ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml 
b/.github/workflows/integration-build-product.yml index bf4b51ad..2e8f8a7f 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -948,7 +948,7 @@ jobs: export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" rm -rf $USERSPACE_FOLDER_PATH - mkdir -P $USERSPACE_FOLDER_PATH + mkdir -P $USERSPACE_FOLDER_PATH/.git/ sudo chmod 777 $USERSPACE_FOLDER_PATH sudo chmod -R 777 /opt/movai From a27554cc7db1ac081b3d249ffc15e49a7d01f770 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 16:04:06 +0000 Subject: [PATCH 33/78] fix quikstart++ --- .github/workflows/integration-build-product.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 2e8f8a7f..016d1455 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -949,7 +949,6 @@ jobs: export SIMULATION_ID="CI" rm -rf $USERSPACE_FOLDER_PATH mkdir -P $USERSPACE_FOLDER_PATH/.git/ - sudo chmod 777 $USERSPACE_FOLDER_PATH sudo chmod -R 777 /opt/movai wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash From 53d19e0c6c0cabd7199f034eb76e4d7b6ea97c0e Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 16:19:13 +0000 Subject: [PATCH 34/78] fix quikstart++ --- .github/workflows/integration-build-product.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index 016d1455..addec111 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -948,7 +948,7 @@ jobs: export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" rm -rf $USERSPACE_FOLDER_PATH - mkdir -P $USERSPACE_FOLDER_PATH/.git/ + mkdir -p $USERSPACE_FOLDER_PATH/.git/ sudo chmod -R 777 /opt/movai wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash From cbe2ddf5ce88f2ec200f8cae0a4be1f3bd70c862 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 16:36:09 +0000 Subject: [PATCH 35/78] fix quikstart+++ --- .github/workflows/integration-build-product.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index addec111..efa16efe 100644 --- a/.github/workflows/integration-build-product.yml +++ b/.github/workflows/integration-build-product.yml @@ -947,8 +947,8 @@ jobs: export USERSPACE_FOLDER_PATH="/opt/movai/robots/userspace" export PUBLIC_IP=$(hostname -I | awk "{print $1}") export SIMULATION_ID="CI" - rm -rf $USERSPACE_FOLDER_PATH - mkdir -p $USERSPACE_FOLDER_PATH/.git/ + sudo rm -rf $USERSPACE_FOLDER_PATH + sudo mkdir -p $USERSPACE_FOLDER_PATH/.git/ sudo chmod -R 777 /opt/movai wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash From 7cae004ffe5985eb5865fa57c33c7c5ae834fae2 Mon Sep 17 00:00:00 2001 From: Denis Borzenkov Date: Wed, 20 Mar 2024 17:41:57 +0000 Subject: [PATCH 36/78] test display --- .github/workflows/integration-build-product.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-product.yml b/.github/workflows/integration-build-product.yml index efa16efe..da98f3b8 100644 --- a/.github/workflows/integration-build-product.yml +++ 
b/.github/workflows/integration-build-product.yml @@ -974,10 +974,10 @@ jobs: run: | ssh ${{ steps.infra_outputs.outputs.ssh_connect_string }} -i ~/.ssh/ci_priv_key.pem -o StrictHostKeyChecking=no ' set -e - + sleep 40 export DISPLAY="$(w -oush | grep -Eo " :[0-9]+" | uniq | cut -d \ -f 2)" echo "Display detected (dinamic) is $DISPLAY" - export DISPLAY=":0" + export DISPLAY=":1" echo "Display detected is $DISPLAY" xhost +local:docker From 85839448b9d09f47db6fc255da99285ba0525f8e Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Fri, 22 Mar 2024 17:21:27 +0000 Subject: [PATCH 37/78] have fleet validations --- .../workflows/integration-build-platform.yml | 2324 ++--------------- 1 file changed, 250 insertions(+), 2074 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 271ccddd..fcf7f33c 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -72,9 +72,9 @@ env: JIRA_USERNAME: ${{ secrets.jira_username}} JIRA_PASSWORD: ${{ secrets.jira_password}} # slack channel rd-platform - SLACK_CHANNEL: "C02U028NMB7" + #SLACK_CHANNEL: "C02U028NMB7" # development slack channel - #SLACK_CHANNEL: "C05K2KF1UP8" + SLACK_CHANNEL: "C05K2KF1UP8" jobs: Validate-boostrap-configs: @@ -170,20 +170,9 @@ jobs: path: platform_configs/* retention-days: 5 - Standalone-Validations: - runs-on: ubuntu-20.04 + Fleet-Validations: needs: [Validate-boostrap-configs] - outputs: - slack_thread_id: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - steps: - - name: Pass through - run: echo "Pass" - - Validation-UI-Tests: - needs: [Standalone-Validations] runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} steps: - name: Cleanup Workspace uses: rtCamp/action-cleanup@master @@ -204,86 +193,104 @@ jobs: python3 -m pip install pyopenssl --upgrade python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - name: unstash robot_configs uses: actions/download-artifact@v3 with: name: robot_configs path: . - - name: Patch robot_configs *.ci with the right full path + - name: Provision remote vms (AWS) + if: ${{ false }} shell: bash run: | - find -L . 
-type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; + mkdir aws_artifacts + python3 -m pip install awscli + cd staging + export PATH="$HOME/.local/bin:$PATH" + export product="platform" + export version="$PRODUCT_RELEASE_VERSION" + ./ec2_provision.sh + cp -vf infra_ids.txt ../aws_artifacts/ + + - name: Stash ci_infra_artifacts (AWS) + if: ${{ false }} + uses: actions/upload-artifact@v3 + with: + name: ci_infra_artifacts + path: aws_artifacts/* + retention-days: 5 - - name: Setup QA UI tests - id: ui_tests_setup + - name: Install terraform shell: bash run: | - qa_key=ui_tests + wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg + echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list + sudo apt update && sudo apt install terraform -y - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/jira_report.txt /tmp/test_set.txt + - name: Setup terraform proxmox provisioner + id: provision_infra_setup + shell: bash + run: | + provision_infra_dir=provision_scripts + provision_infra_version=0.0.0-10 + provision_infra_repo_name=devops-tf-proxmox-bpg + rm -rf $provision_infra_dir export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.jira_report --output_file /tmp/jira_report.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - jira_report=$(cat /tmp/jira_report.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "jira_report=${jira_report}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT + integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir + ls -la $provision_infra_dir + echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate + - name: Define Instance names + id: infra_names + shell: bash + run: | + branch=$(echo ${GITHUB_REF#refs/heads/} | sed "s;\.;-;g" ) - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic + local_manager_prefix="ip-$branch-manager" + local_worker_prefix="ip-$branch-worker" + echo "$local_manager_prefix" + echo "$local_worker_prefix" - popd + echo "manager_prefix=${local_manager_prefix}" >> $GITHUB_OUTPUT + echo "worker_prefix=${local_worker_prefix}" >> $GITHUB_OUTPUT - - name: Feature File Validation - id: feature_file_ui - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + - name: Provision remote vms (Proxmox) + working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - . venv/bin/activate - - xray download ${{ steps.ui_tests_setup.outputs.test_set }} - xray compare ./tests/feature/ + terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" + terraform plan + terraform apply -auto-approve + terraform refresh + env: + TF_VAR_number_agents: ${{ inputs.fleet_number_members }} + TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" + TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} + TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} + TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} + TF_VAR_ip_list: ${{ inputs.fleet_ips }} + TF_VAR_proxmox_host: "hel" + TF_VAR_vm_gateway: "172.22.0.1" + TF_VAR_ip_mask: 24 + TF_VAR_bios: "seabios" + TF_VAR_pool: "IP-Temp-VMs" + TF_VAR_tags: "ip-fleet" - deactivate + TF_VAR_fleet_hosts_user: "devops" + TF_VAR_template_name: "ubuntu-2004-cloudinit-template2" + TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.manager_prefix }} + TF_VAR_fleet_manager_memory: 8192 + TF_VAR_template_name_no_gpu: "ubuntu-2004-cloudinit-template2" + TF_VAR_fleet_worker_name_prefix: ${{ steps.infra_names.outputs.worker_prefix }} + TF_VAR_fleet_worker_memory: 8192 - - name: Prepare QA Feature File Validation slack message + - name: Prepare Devops provisioning slack message if: always() - id: pre_slack + id: pre_slack_infra run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is unstable :rain_cloud: \ - ${{ github.job }} feature file validation: ${{ steps.feature_file_ui.outcome }} \ + MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is being impacted by an infrastructural issue. \ + Provisioning of fleet infrastructure failed. Please take a look! 
\ Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT @@ -291,96 +298,171 @@ jobs: if: failure() uses: slackapi/slack-github-action@v1.23.0 with: - channel-id: "C02PB9A9F45" - slack-message: ${{ steps.pre_slack.outputs.msg_error }} + channel-id: "G0102LEV1CL" + slack-message: ${{ steps.pre_slack_infra.outputs.msg_error }} env: SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - name: Install - id: install + - name: Apply ansible inventory shell: bash run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true + cp ${{ steps.provision_infra_setup.outputs.target_dir }}/hosts staging/hosts + export PATH="$HOME/.local/bin:$PATH" + integration-pipeline get_yml_value --file staging/hosts --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt - rm -rf artifacts - mkdir -p artifacts - cp *.json artifacts/ + - name: Setup ansible installation + id: ansible_install_setup + shell: bash + run: | + install_key=ansible_deploy - CONFIG_FILE_NAME="basic-standalone-noetic.json" + rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.target_dir --output_file /tmp/target_dir.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.version --output_file /tmp/version.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.name --output_file /tmp/repo_name.txt + install_infra_dir=$(cat /tmp/target_dir.txt) + install_infra_version=$(cat /tmp/version.txt) + install_infra_repo_name=$(cat /tmp/repo_name.txt) + + rm -rf $install_infra_repo_name + integration-pipeline fetch_by_tag --repo $install_infra_repo_name --version $install_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $install_infra_dir + ls -la $install_infra_dir + echo "target_dir=${install_infra_dir}" >> $GITHUB_OUTPUT + + - name: Ansible install platform + id: ansible_install_platform + working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} + shell: bash + run: | + + function ensure_agent_up(){ + vm_ip=$1 + i="0" + max=15 + success=1 + while [ $success -ne 0 ] + do + echo "Checking if $vm_ip is reachable ($i/$max)" + ping -c1 $vm_ip &>/dev/null + success=$? + + if [ $i -lt $max ] + then + i=$[$i+1] + else + echo "Timeout waiting for $vm_ip" + exit 2 + fi + + sleep 2 + done - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME + } + echo "${{ secrets.ssh_pem_fleet_aws_vm }}" > ~/.ssh/aws_slave.pem + sudo chmod 600 ~/.ssh/aws_slave.pem + while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ; do echo Waiting for other software managers to finish... 
; sleep 5;done + sudo apt install -y python3.9 python3.9-venv + python3.9 -m venv ansible-venv + source ansible-venv/bin/activate + python3 -m pip install -r requirements.txt + ansible-galaxy install -r requirements.yml --timeout 120 - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" + stripped_ips=$(echo ${{ inputs.fleet_ips }} | sed "s;\[;;g" | sed "s;];;g" | sed "s; ;;g") + touch ~/.ssh/known_hosts + sudo chmod 600 ~/.ssh/known_hosts + IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips + manager_ip=${stripped_ips_arr[0]} + echo $manager_ip + echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT + for ip in "${stripped_ips_arr[@]}" + do + ensure_agent_up $ip + ssh-keygen -f ~/.ssh/known_hosts -R $ip + ssh-keyscan -H $ip >> ~/.ssh/known_hosts done - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT + ansible-playbook install.yml \ + -i ../staging/hosts \ + --key-file ~/.ssh/aws_slave.pem \ + --extra-vars=@"$(pwd)/.."/product-manifest.yaml \ + -e fleet_domain_dns="" \ + -e "{\"proxycerts__remote_redis_servers_fqn\": [$(cat ../staging/manager_private_ip.txt)]}" \ + -e '{"fleet_extra_hosts": ["172.22.0.106 registry.hel.mov.ai traefik"]}' \ + --skip-tags "validate,ufw,hardening" + execution_status=$? + deactivate + exit $execution_status - - name: Install dependencies in spawner - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + - name: Setup QA API tests + id: api_tests_setup shell: bash run: | - # install test dependencies on spawner - if [ -f apt-requirements.txt ]; then - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" " ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - fi - - - name: UI tests - timeout-minutes: 120 - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + qa_key=api_tests + + rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt + export PATH="$HOME/.local/bin:$PATH" + + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt + integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt + + tests_dir=$(cat /tmp/target_dir.txt) + tests_version=$(cat /tmp/version.txt) + tests_repo_name=$(cat /tmp/repo_name.txt) + + rm -rf $tests_repo_name + integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir + ls -la $tests_dir + + echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT + + # setup venv in a step that is always executed + pushd "${tests_dir}" + rm -rf venv + python3 -m venv venv + . 
venv/bin/activate + python3 -m pip install pip --upgrade + python3 -m pip install pyopenssl --upgrade + pip install -r requirements.txt + deactivate + popd + + - name: API tests + timeout-minutes: 30 + working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} shell: bash run: | + # install test dependencies on host + sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" . venv/bin/activate - pytest \ - -ra \ - --hub_url http://selenoid-ui.hel.mov.ai \ - --base_url https://${{ steps.agent_info.outputs.ip }}/ \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - --cucumberjson=./results.json + pytest src \ + --movai-ip ${{ steps.ansible_install_platform.outputs.manager_ip }} \ + --movai-user admin \ + --movai-pw admin@123 \ + -m fleet deactivate - - name: Create Xray test execution + - name: Save docker container logs if: always() - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} + working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} + shell: bash run: | - export PATH="$HOME/.local/bin:$PATH" + # for sanity + docker ps -a - # get platform version - rm -f /tmp/version.txt - integration-pipeline get_yml_value --file ../product-manifest.yaml --key version --output_file /tmp/version.txt - plat_version=$(cat /tmp/version.txt) + for container in backend spawner messager-server; do + CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^${container}-.*") + docker logs "${CONTAINER_ID}" &> "${container}.log" || true + done || true + + # movai-service + journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - # create test execution - if [ "${{ steps.ui_tests_setup.outputs.jira_report }}" == "True" ] ; then - . 
venv/bin/activate - xray create ./results.json --version "${plat_version}" --label UI_Automation - deactivate - fi - name: Get current job id if: always() @@ -400,7 +482,7 @@ jobs: if: always() id: pre_slack_result run: | - MESSAGE=":white_check_mark:${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" + MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT @@ -412,7 +494,7 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - name: Slack message failure uses: archive/github-actions-slack@master @@ -422,1962 +504,56 @@ jobs: slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} slack-channel: ${{ env.SLACK_CHANNEL }} slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} + slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - name: Save docker container logs + - name: Collect Fleet QA artifacts + working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} if: always() - working-directory: ${{ steps.ui_tests_setup.outputs.target_dir }} shell: bash + env: + API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - UI_DIR: ${{ steps.ui_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log and *.zip might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${UI_DIR}"/*.log ./qa_artifacts || true - cp -r "${UI_DIR}"/*.tar ./qa_artifacts || true - cp -r "${UI_DIR}"/*.json ./qa_artifacts || true - cp -r "${UI_DIR}"/*.html ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_ui_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validation-Install-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: 
- - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . -type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA install tests - id: install_tests_setup - shell: bash - run: | - qa_key=install_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/jira_report.txt /tmp/test_set.txt - - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.jira_report --output_file /tmp/jira_report.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - jira_report=$(cat /tmp/jira_report.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "jira_report=${jira_report}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Feature File Validation - id: feature_file_install - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - . 
venv/bin/activate - - python3 testcasemanagement/testcase_importer.py --target "${{ steps.install_tests_setup.outputs.test_set }}" - python3 testcasemanagement/feature_file_processor.py --validate - - deactivate - - - name: Prepare QA Feature File Validation slack message - if: always() - id: pre_slack - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is unstable :rain_cloud: \ - ${{ github.job }} feature file validation: ${{ steps.feature_file_install.outcome }} \ - Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message failure - if: failure() - uses: slackapi/slack-github-action@v1.23.0 - with: - channel-id: "C02PB9A9F45" - slack-message: ${{ steps.pre_slack.outputs.msg_error }} - env: - SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - - name: Install tests - timeout-minutes: 45 - id: install - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - export PATH="$HOME/.local/bin:$PATH" - - . venv/bin/activate - rm -rf results/* - - if [ "${{ steps.install_tests_setup.outputs.jira_report }}" == "True" ] ; then - pytest tests/ \ - -ra \ - -k '${{ steps.install_tests_setup.outputs.test_set }}' \ - --installPath="." --jsonConfigFilePath="../basic-standalone-noetic.json.ci" \ - --jira_report - else - pytest tests/ \ - -ra \ - -k '${{ steps.install_tests_setup.outputs.test_set }}' \ - --installPath="." --jsonConfigFilePath="../basic-standalone-noetic.json.ci" - fi - - deactivate - - user=$(cat results/credentials.txt | awk -F: '{print $1}') - pwd=$(cat results/credentials.txt | awk -F: '{print $2}') - - echo "movai_user=${user}" >> $GITHUB_OUTPUT - echo "movai_pwd=${pwd}" >> $GITHUB_OUTPUT - - - name: Run mobtest - shell: bash - run: | - container_id=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - docker exec -t "$container_id" bash -c ' - set -e - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple mobtest==${{ env.MOBTEST_VERSION }} --ignore-installed - mobtest proj /opt/ros/noetic/share/ - ' - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ 
steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.install_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # Spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - INSTALL_DIR: ${{ steps.install_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${INSTALL_DIR}"/*.log ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/*.tar ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/*.log ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/*.zip ./qa_artifacts || true - cp -r "${INSTALL_DIR}"/results/test_report_*.html ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_install_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Collect Installed components - shell: bash - run: | - mkdir -p artifacts - - used_images=($(docker images --format "{{.Repository}}:{{.Tag}}" | tr ' ' "\n")) - for image in "${used_images[@]}" - do - image_short_name=$(grep -oP "(?<=/$ENV/).*?(?=:)" <<< "$image" || grep -oP "(?<=/devops/).*?(?=:)" <<< "$image" || true) - if [[ "$image_short_name" =~ .*"backend".* || "$image_short_name" =~ .*"spawner".* || "$image_short_name" =~ .*"redis"*.* || "$image_short_name" =~ .*"health-node".* || "" =~ .*"message-server*.*" ]]; - then - echo "scanning $image" - container_ids=($(docker ps -q -f "ancestor=$image" | tr ' ' "\n")) - for container_id in "${container_ids[@]}" - do - container_name=$(docker inspect --format="{{.Name}}" $container_id) - docker exec -t "$container_id" bash -c ' - set -e - - sudo apt update || apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install --upgrade pip || wget https://bootstrap.pypa.io/get-pip.py -O - | python3 - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} - package-deployer scan - ls -la /tmp - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/$container_name-noetic-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy 
artifacts/$container_name-noetic-3rdParty.dploy - done - else - echo "Skipping scan of $image" - fi - done - export PATH="$HOME/.local/bin:$PATH" - package-deployer scan - cp /tmp/deployable.dploy artifacts/host-noetic-deployable.dploy - cp /tmp/undeployable.dploy artifacts/host-noetic-3rdParty.dploy - - - name: Stash deploy_artifacts_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts/*.dploy - retention-days: 5 - - - name: Stash QA artifacts - if: always() - shell: bash - env: - INSTALL_DIR: ${{ steps.install_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validation-API-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . -type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA API tests - id: api_tests_setup - shell: bash - run: | - qa_key=api_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Install - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="basic-standalone-noetic.json" - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" - done - - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - - - name: API tests - timeout-minutes: 30 - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - . venv/bin/activate - - pytest src \ - --movai-ip ${{ steps.agent_info.outputs.ip }} \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m "not fleet" - - deactivate - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ 
needs.Standalone-Validations.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - - name: Stash QA artifacts - if: always() - shell: bash - env: - API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log and *.zip might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${API_DIR}"/*.log ./qa_artifacts || true - cp -r "${API_DIR}"/*.tar ./qa_artifacts || true - cp -r "${API_DIR}"/results/*.zip ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_api_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validation-Flow-Tests: - needs: [Standalone-Validations] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Patch robot_configs *.ci with the right full path - shell: bash - run: | - find -L . 
-type f -name '*.json.ci' -exec \ - sed -i "s;/__w;$(pwd)/../..;g" {} \ - \; - - - name: Setup QA Flow tests - id: flow_tests_setup - shell: bash - run: | - qa_key=flow_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/test_set.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "version=${tests_version}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Install - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - CONFIG_FILE_NAME="basic-standalone-noetic.json" - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" - done - - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - execution_status=$? 
- exit $execution_status - rm movai_service_version - - - name: Flow tests - timeout-minutes: 30 - working-directory: ${{ steps.flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - # install test dependencies on spawner - if [ -f apt-requirements.txt ]; then - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" " ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - fi - - # run tests - . venv/bin/activate - - pytest \ - -s \ - -ra \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m '${{ steps.flow_tests_setup.outputs.test_set }}' \ - --tb=short - - deactivate - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Standalone-Validations.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' 
&> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - FLOW_DIR: ${{ steps.flow_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts, they might not exist - mkdir -p qa_artifacts - cp -r "${FLOW_DIR}"/*.log ./qa_artifacts || true - cp -r "${FLOW_DIR}"/*.tar ./qa_artifacts || true - - - name: Stash QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: qa_artifacts_flow_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Validations-Finish: - needs: [Validation-UI-Tests, Validation-Install-Tests, Validation-API-Tests, Validation-Flow-Tests] - runs-on: ubuntu-20.04 - outputs: - slack_thread_id: ${{ needs.Validation-UI-Tests.outputs.slack_thread_id }} - steps: - - name: Pass through - run: echo "Pass" - - Fleet-Validations: - needs: [Validate-boostrap-configs] - runs-on: integration-pipeline - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . 
- - - name: Provision remote vms (AWS) - if: ${{ false }} - shell: bash - run: | - mkdir aws_artifacts - python3 -m pip install awscli - cd staging - export PATH="$HOME/.local/bin:$PATH" - export product="platform" - export version="$PRODUCT_RELEASE_VERSION" - ./ec2_provision.sh - cp -vf infra_ids.txt ../aws_artifacts/ - - - name: Stash ci_infra_artifacts (AWS) - if: ${{ false }} - uses: actions/upload-artifact@v3 - with: - name: ci_infra_artifacts - path: aws_artifacts/* - retention-days: 5 - - - name: Install terraform - shell: bash - run: | - wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg - echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list - sudo apt update && sudo apt install terraform -y - - - name: Setup terraform proxmox provisioner - id: provision_infra_setup - shell: bash - run: | - provision_infra_dir=provision_scripts - provision_infra_version=0.0.1-38 - provision_infra_repo_name=devops-tf-proxmox-fleet - - rm -rf $provision_infra_dir - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir - ls -la $provision_infra_dir - echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT - - - name: Define Instance names - id: infra_names - shell: bash - run: | - branch=$(echo ${GITHUB_REF#refs/heads/} | sed "s;\.;-;g" ) - - local_manager_prefix="ip-$branch-manager" - local_worker_prefix="ip-$branch-worker" - echo "$local_manager_prefix" - echo "$local_worker_prefix" - - echo "manager_prefix=${local_manager_prefix}" >> $GITHUB_OUTPUT - echo "worker_prefix=${local_worker_prefix}" >> $GITHUB_OUTPUT - - - name: Provision remote vms (Proxmox) - working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - shell: bash - run: | - terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" - terraform plan - terraform apply -auto-approve - env: - TF_VAR_number_agents: ${{ inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} - TF_VAR_proxmox_host: "hel" - TF_VAR_vm_gateway: "172.22.0.1" - TF_VAR_ip_mask: 24 - TF_VAR_bios: "seabios" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-fleet" - - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.manager_prefix }} - TF_VAR_fleet_manager_memory: 8192 - TF_VAR_template_name_no_gpu: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_worker_name_prefix: ${{ steps.infra_names.outputs.worker_prefix }} - TF_VAR_fleet_worker_memory: 8192 - - - name: Prepare Devops provisioning slack message - if: always() - id: pre_slack_infra - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY}, (${GITHUB_REF#refs/heads/}), build: $(cat product.version) is being impacted by an infrastructural issue. \ - Provisioning of fleet infrastructure failed. Please take a look! 
\ - Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message failure - if: failure() - uses: slackapi/slack-github-action@v1.23.0 - with: - channel-id: "G0102LEV1CL" - slack-message: ${{ steps.pre_slack_infra.outputs.msg_error }} - env: - SLACK_BOT_TOKEN: ${{ secrets.slack_token_id }} - - - name: Apply ansible inventory - shell: bash - run: | - cp ${{ steps.provision_infra_setup.outputs.target_dir }}/hosts staging/hosts - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file staging/hosts --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt - - - name: Setup ansible installation - id: ansible_install_setup - shell: bash - run: | - install_key=ansible_deploy - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.installion.$install_key.name --output_file /tmp/repo_name.txt - install_infra_dir=$(cat /tmp/target_dir.txt) - install_infra_version=$(cat /tmp/version.txt) - install_infra_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $install_infra_repo_name - integration-pipeline fetch_by_tag --repo $install_infra_repo_name --version $install_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $install_infra_dir - ls -la $install_infra_dir - echo "target_dir=${install_infra_dir}" >> $GITHUB_OUTPUT - - - name: Ansible install platform - id: ansible_install_platform - working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} - shell: bash - run: | - - function ensure_agent_up(){ - vm_ip=$1 - i="0" - max=15 - success=1 - while [ $success -ne 0 ] - do - echo "Checking if $vm_ip is reachable ($i/$max)" - ping -c1 $vm_ip &>/dev/null - success=$? - - if [ $i -lt $max ] - then - i=$[$i+1] - else - echo "Timeout waiting for $vm_ip" - exit 2 - fi - - sleep 2 - done - - } - echo "${{ secrets.ssh_pem_fleet_aws_vm }}" > ~/.ssh/aws_slave.pem - sudo chmod 600 ~/.ssh/aws_slave.pem - while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ; do echo Waiting for other software managers to finish... 
; sleep 5;done - sudo apt install -y python3.9 python3.9-venv - python3.9 -m venv ansible-venv - source ansible-venv/bin/activate - python3 -m pip install -r requirements.txt - ansible-galaxy install -r requirements.yml --timeout 120 - - stripped_ips=$(echo ${{ inputs.fleet_ips }} | sed "s;\[;;g" | sed "s;];;g" | sed "s; ;;g") - touch ~/.ssh/known_hosts - sudo chmod 600 ~/.ssh/known_hosts - IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips - manager_ip=${stripped_ips_arr[0]} - echo $manager_ip - echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT - for ip in "${stripped_ips_arr[@]}" - do - ensure_agent_up $ip - ssh-keygen -f ~/.ssh/known_hosts -R $ip - ssh-keyscan -H $ip >> ~/.ssh/known_hosts - done - - ansible-playbook install.yml \ - -i ../staging/hosts \ - --key-file ~/.ssh/aws_slave.pem \ - --extra-vars=@"$(pwd)/.."/product-manifest.yaml \ - -e fleet_domain_dns="" \ - -e "{\"proxycerts__remote_redis_servers_fqn\": [$(cat ../staging/manager_private_ip.txt)]}" \ - -e '{"fleet_extra_hosts": ["172.22.0.106 registry.hel.mov.ai traefik"]}' \ - --skip-tags "validate,ufw,hardening" - execution_status=$? - deactivate - exit $execution_status - - - name: Setup QA API tests - id: api_tests_setup - shell: bash - run: | - qa_key=api_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: API tests - timeout-minutes: 30 - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - . 
venv/bin/activate - - pytest src \ - --movai-ip ${{ steps.ansible_install_platform.outputs.manager_ip }} \ - --movai-user admin \ - --movai-pw admin@123 \ - -m fleet - - deactivate - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.api_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - for container in backend spawner messager-server; do - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^${container}-.*") - docker logs "${CONTAINER_ID}" &> "${container}.log" || true - done || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Collect Fleet QA artifacts - working-directory: ${{ steps.ansible_install_setup.outputs.target_dir }} - if: always() - shell: bash - env: - API_DIR: ${{ steps.api_tests_setup.outputs.target_dir }} - run: | - rm -rf fleet_qa_artifacts - mkdir -p fleet_qa_artifacts/install - source ansible-venv/bin/activate - # install fleet_tests artifacts - for fleet_host in "manager" "member0" "member1"; do - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true - - echo "From $fleet_host:" - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true - echo "$(tail -n +2 fleet_qa_artifacts/install/$fleet_host-docker_ps.log )" - - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true - done - - deactivate - - # qa api 
tests artifacts - # *.log and *.zip might not exist if the test fails early - mkdir -p fleet_qa_artifacts/api - cp -r "${API_DIR}"/*.log fleet_qa_artifacts/api || true - cp -r "${API_DIR}"/*.tar fleet_qa_artifacts/api || true - cp -r "${API_DIR}"/results/*.zip fleet_qa_artifacts/api || true - - - name: Stash Fleet QA artifacts - if: always() - uses: actions/upload-artifact@v3 - with: - name: fleet_qa_artifacts - path: ${{ steps.ansible_install_setup.outputs.target_dir }}/fleet_qa_artifacts/* - retention-days: 5 - - - name: Teardown remote vms (Proxmox) - working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} - if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} - shell: bash - run: terraform destroy -auto-approve - env: - TF_VAR_number_agents: ${{ inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} - - - Build-Simulator: - needs: [Validate-boostrap-configs] - runs-on: integration-pipeline - env: - DISTRO: noetic - outputs: - slack_thread_id: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: unstash sim_configs - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - - name: Prepare Skip variables - id: pre_simulator_build - run: | - if [ ! -f "simulator_artifacts/version" ]; then - echo "skip_simulator_build=true" >> $GITHUB_OUTPUT - else - echo "skip_simulator_build=false" >> $GITHUB_OUTPUT - fi - - - name: Lint docker image - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - wget https://github.com/hadolint/hadolint/releases/download/v2.9.3/hadolint-Linux-x86_64 - chmod +x hadolint-Linux-x86_64 - ./hadolint-Linux-x86_64 docker/$DISTRO/Dockerfile-simulator -t error - - - name: Download models - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline fetch_simulator_models \ - --manifest_platform_base_key product_components \ - --gh_api_user $GITHUB_API_USR \ - --gh_api_pwd ${{ secrets.auto_commit_pwd }} \ - --target_dir "./models" - if [ ! 
-d ./models ]; then mkdir -p ./models; fi - - - name: Login to Private Registry - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Prepare docker build variables - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - id: pre_build - run: | - echo "image_name=$(cat simulator_artifacts/simulator_name.ci)" >> $GITHUB_OUTPUT - echo "base_name=$(cat simulator_artifacts/simulator_base.ci)" >> $GITHUB_OUTPUT - - - name: Build with args and push:${{ inputs.deploy }} - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - uses: docker/build-push-action@v3 - with: - context: . - platforms: linux/amd64 - file: docker/${{ env.DISTRO }}/Dockerfile-simulator - push: true - tags: "${{ env.REGISTRY }}/qa/${{ steps.pre_build.outputs.image_name }}" - pull: true - build-args: | - BASE_IMAGE=${{ steps.pre_build.outputs.base_name }} - CI_SCRIPT_VERSION=${{ env.CI_INTEGRATION_SCRIPTS_VERSION }} - - - name: Collect Installed components - if: ${{ steps.pre_simulator_build.outputs.skip_simulator_build == 'false' }} - shell: bash - run: | - cd simulator_artifacts - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline publish_simulator_state_artifacts \ - --product_name ${{ inputs.product_name }} \ - --branch ${GITHUB_REF#refs/heads/} - - - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} - - - - name: pre-stash - shell: bash - run: | - echo "$REGISTRY/qa/$(cat simulator_artifacts/simulator_name.ci)" > simulator.image.artifact - - - name: Stash deploy_simulator_artifacts - uses: actions/upload-artifact@v3 - with: - name: deploy_simulator_artifacts - path: 
simulator.image.artifact - retention-days: 5 - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - Simulator-Validations: - needs: [Build-Simulator] - runs-on: integration-pipeline - steps: - - uses: rtCamp/action-cleanup@master - - - name: Checkout - uses: actions/checkout@v3 - - - name: Agent info - run: | - echo "public ip: $(curl ipinfo.io/ip)" - echo "private ip: $(hostname -I | awk '{print $1}')" - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: . - - - name: unstash sim_configs - uses: actions/download-artifact@v3 - with: - name: sim_configs - path: simulator_artifacts - - - name: unstash robot_jsons_noetic - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: Login to Private Registry - uses: docker/login-action@v2 - with: - username: ${{ secrets.registry_user }} - password: ${{ secrets.registry_password }} - registry: ${{ env.REGISTRY }} - - - name: Setup QA Flow tests - id: sim_flow_tests_setup - shell: bash - run: | - qa_key=flow_tests - - rm -f /tmp/target_dir.txt /tmp/version.txt /tmp/repo_name.txt /tmp/test_set.txt - export PATH="$HOME/.local/bin:$PATH" - - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.target_dir --output_file /tmp/target_dir.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.version --output_file /tmp/version.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.name --output_file /tmp/repo_name.txt - integration-pipeline get_yml_value --file product-manifest.yaml --key product_components.qa.$qa_key.test_set --output_file /tmp/test_set.txt - - tests_dir=$(cat /tmp/target_dir.txt) - tests_version=$(cat /tmp/version.txt) - tests_repo_name=$(cat /tmp/repo_name.txt) - test_set=$(cat /tmp/test_set.txt) - - rm -rf $tests_repo_name - integration-pipeline fetch_by_tag --repo $tests_repo_name --version $tests_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $tests_dir - ls -la $tests_dir - - echo "target_dir=${tests_dir}" >> $GITHUB_OUTPUT - echo "version=${tests_version}" >> $GITHUB_OUTPUT - echo "test_set=${test_set}" >> $GITHUB_OUTPUT - - # setup venv in a step that is always executed - pushd "${tests_dir}" - rm -rf venv - python3 -m venv venv - . 
venv/bin/activate - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - pip install -r requirements.txt - deactivate - popd - - - name: Installation - id: install - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - mkdir -p artifacts - cp *.json artifacts/ - - CONFIG_FILE_NAME="basic-standalone-ignition-noetic.json" - mkdir -p userspace/ + rm -rf fleet_qa_artifacts + mkdir -p fleet_qa_artifacts/install + source ansible-venv/bin/activate + # install fleet_tests artifacts + for fleet_host in "manager" "member0" "member1"; do + ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true - export USERSPACE_FOLDER_PATH="$(pwd)/userspace" - export PUBLIC_IP=$(hostname -I | awk '{print $1}') + echo "From $fleet_host:" + ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true + echo "$(tail -n +2 fleet_qa_artifacts/install/$fleet_host-docker_ps.log )" - export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key services_version --output_file movai_service_version - integration-pipeline get_json_value --file $CONFIG_FILE_NAME.ci --key quickstart_version --output_file quickstart_version - - wget https://movai-scripts.s3.amazonaws.com/QuickStart_$(cat quickstart_version).bash - chmod +x ./QuickStart_$(cat quickstart_version).bash - ./QuickStart_$(cat quickstart_version).bash --apps $(cat movai_service_version) $CONFIG_FILE_NAME - MOVAI_USER="ci" - MOVAI_PWD="4Iva6UHAQq9DGITj" - for robot in $(movai-cli robots list); do - movai-cli robots user "$robot" "$MOVAI_USER" "$MOVAI_PWD" + ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true done - echo "movai_user=${MOVAI_USER}" >> $GITHUB_OUTPUT - echo "movai_pwd=${MOVAI_PWD}" >> $GITHUB_OUTPUT - env: - DISPLAY: ":0" - SIMULATION_ID: "CI" - - - name: Simulator tests - timeout-minutes: 30 - working-directory: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # install test dependencies on host - sudo apt install -y --allow-downgrades python3-rosnode python3-rosparam python3-rostopic - export PYTHONPATH="${PYTHONPATH}:/usr/lib/python3/dist-packages" - - # install test dependencies on spawner - ## get spawner container name - CONTAINER_ID=$(docker ps --format '{{.Names}}' --filter "name=^spawner-.*") - ## get apt dependencies - APT_DEPS=$(cat apt-requirements.txt | tr "\n" " ") - ## install - docker exec -t "${CONTAINER_ID}" bash -c " - sudo apt update - sudo apt install -y ${APT_DEPS} - " - - # run tests - . 
venv/bin/activate - - pytest \ - -s \ - -ra \ - --movai-user ${{ steps.install.outputs.movai_user }} \ - --movai-pw ${{ steps.install.outputs.movai_pwd }} \ - -m 'simulator' \ - --tb=short - deactivate - - name: Get current job id - if: always() - shell: bash - id: job_info - run: | - sudo apt install jq -y - job_id=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | .[0].id') - job_html_url=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id}}/attempts/${{ github.run_attempt }}/jobs | jq -r '.jobs | map(select(.name | contains("${{ github.job }}"))) | .[0].html_url') - echo "$job_id" - echo "$job_html_url" - echo "job_url=$job_html_url" >> $GITHUB_OUTPUT - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack_result - run: | - MESSAGE=":white_check_mark: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job passed" - MESSAGE_ERR=":x: ${{ github.job }} (Attempt: #${{ github.run_attempt }}) job failed" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - echo "msg_error=${MESSAGE_ERR}\n Details: ${{ steps.job_info.outputs.job_url }}" >> $GITHUB_OUTPUT - - - name: Slack message success - uses: archive/github-actions-slack@master - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - name: Slack message failure - uses: archive/github-actions-slack@master - if: failure() - with: - slack-function: send-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-text: ${{ steps.pre_slack_result.outputs.msg_error }} - slack-optional-thread_ts: ${{ needs.Build-Simulator.outputs.slack_thread_id }} - - - - name: Save docker container logs - if: always() - working-directory: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - shell: bash - run: | - # for sanity - docker ps -a - - # backend - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^backend-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # spawner - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^spawner-.*") - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" - - # message-server - CONTAINER_ID=$(docker ps -a --format '{{.Names}}' --filter "name=^message-server-.*") || true - docker logs "${CONTAINER_ID}" &> "${CONTAINER_ID}.log" || true - - # movai-service - journalctl -u movai-service --since '1hour ago' &> "movai-service.log" - - # spawner (mobros firmware) - journalctl -u movai-service -t mobros --since '1hour ago' &> spawner-firmware.log || true - - - name: Stash QA artifacts - if: always() - shell: bash - env: - SIM_DIR: ${{ steps.sim_flow_tests_setup.outputs.target_dir }} - run: | - # cleanup - rm -rf qa_artifacts - - # tests artifacts - # *.log might not exist if the test fails early - mkdir -p qa_artifacts - cp -r "${SIM_DIR}"/*.log ./qa_artifacts || true - cp -r "${SIM_DIR}"/*.tar ./qa_artifacts || true + # qa api tests artifacts + # *.log and *.zip might not exist if the test fails early + mkdir -p fleet_qa_artifacts/api + cp -r "${API_DIR}"/*.log fleet_qa_artifacts/api || true + cp -r "${API_DIR}"/*.tar fleet_qa_artifacts/api || true + cp -r "${API_DIR}"/results/*.zip fleet_qa_artifacts/api || true - - name: Stash QA artifacts 
+ - name: Stash Fleet QA artifacts if: always() uses: actions/upload-artifact@v3 with: - name: qa_artifacts_simulator_tests - path: qa_artifacts/* - retention-days: 5 - - - name: Collect Installed components - shell: bash - run: | - mkdir -p artifacts - - used_images=($(docker images --format "{{.Repository}}:{{.Tag}}" | tr ' ' "\n")) - for image in "${used_images[@]}" - do - image_short_name=$(grep -oP "(?<=/$ENV/).*?(?=:)" <<< "$image" || grep -oP "(?<=/devops/).*?(?=:)" <<< "$image" || true) - if [[ "$image_short_name" =~ .*"spawner".* ]]; - then - echo "scanning $image" - container_ids=($(docker ps -q -f "ancestor=$image" | tr ' ' "\n")) - for container_id in "${container_ids[@]}" - do - container_name=$(docker inspect --format="{{.Name}}" $container_id) - docker exec -t "$container_id" bash -c ' - set -e - - sudo apt update || apt update - export PATH="$HOME/.local/bin:$PATH" - python3 -m pip install --upgrade pip || wget https://bootstrap.pypa.io/get-pip.py -O - | python3 - python3 -m pip install -i https://artifacts.cloud.mov.ai/repository/pypi-integration/simple --extra-index-url https://pypi.org/simple movai-package-deployer==${{ env.PACKAGE_DEPLOYER_VERSION }} - package-deployer scan - ls -la /tmp - ' || true - docker cp $container_id:/tmp/deployable.dploy artifacts/$container_name-noetic-deployable.dploy - docker cp $container_id:/tmp/undeployable.dploy artifacts/$container_name-noetic-3rdParty.dploy - done - else - echo "Skipping scan of $image" - fi - done - export PATH="$HOME/.local/bin:$PATH" - package-deployer scan - cp /tmp/deployable.dploy artifacts/simulator-noetic-deployable.dploy - cp /tmp/undeployable.dploy artifacts/simulator-noetic-3rdParty.dploy - - - name: Stash deploy_artifacts_simulator_noetic - uses: actions/upload-artifact@v3 - with: - name: deploy_artifacts_simulator_noetic - path: artifacts/*.dploy + name: fleet_qa_artifacts + path: ${{ steps.ansible_install_setup.outputs.target_dir }}/fleet_qa_artifacts/* retention-days: 5 - - name: Remove robots - if: always() - shell: bash - run: | - for robot in $(movai-cli robots list); do - movai-cli robots stop $robot - sleep 5 - movai-cli robots remove $robot - done || true - - - name: Docker cleanups - if: always() - shell: bash - run: | - docker system prune -f - docker image prune --all -f - - publish: - needs: [Validations-Finish, Fleet-Validations, Simulator-Validations] - runs-on: integration-pipeline - outputs: - slack_thread_id: ${{ needs.Validations-Finish.outputs.slack_thread_id }} - steps: - - name: Cleanup Workspace - uses: rtCamp/action-cleanup@master - - name: Checkout - uses: actions/checkout@v3 - - name: Agent info - id: agent_info - run: | - ip=$(hostname -I | awk '{print $1}') - echo $ip - echo "ip=${ip}" >> $GITHUB_OUTPUT - - name: unstash robot_configs - uses: actions/download-artifact@v3 - with: - name: robot_configs - path: . - - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: unstash deploy_artifacts_noetic - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_noetic - path: artifacts - - - name: unstash deploy_artifacts_simulator_noetic - uses: actions/download-artifact@v3 - with: - name: deploy_artifacts_simulator_noetic - path: artifacts - - - name: unstash deploy_simulator_artifacts - uses: actions/download-artifact@v3 - with: - name: deploy_simulator_artifacts - path: . 
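The fleet artifact collection above runs the same ansible ad-hoc command once per host and per log source. A compact sketch of that loop, assuming the same inventory path, key file and host aliases; the `collect` helper is an illustrative refactor, not part of the repo:

```bash
#!/usr/bin/env bash
# Hedged sketch of the per-host log collection done with ansible ad-hoc
# commands above. Inventory, key file and host aliases match the workflow.
set -u
mkdir -p fleet_qa_artifacts/install

collect() {  # collect <host> <label> <remote command>
  local host=$1 label=$2; shift 2
  ansible "$host" -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem \
    -m shell -a "$*" \
    > "fleet_qa_artifacts/install/${host}-${label}.log" || true
}

for fleet_host in manager member0 member1; do
  collect "$fleet_host" service   'journalctl -u movai-service --since "1hour ago"'
  collect "$fleet_host" docker-ps 'docker ps -a'
  collect "$fleet_host" docker    'journalctl -u docker --boot --lines=all'
done
```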
- - - name: Install CI Scripts - shell: bash - run: | - python3 -m pip install pip --upgrade - python3 -m pip install pyopenssl --upgrade - python3 -m pip install integration-pipeline==$CI_INTEGRATION_SCRIPTS_VERSION --ignore-installed - - - name: Install Package Deployer - shell: bash - run: python3 -m pip install movai-package-deployer==$PACKAGE_DEPLOYER_VERSION --ignore-installed - - - name: Publish and create release - shell: bash - run: | - set -m - set -e - - export PATH="$HOME/.local/bin:$PATH" - git config --global --add safe.directory $(pwd) - git config --global user.name '${{ secrets.auto_commit_user }}' - git config --global user.email '${{ secrets.auto_commit_mail }}' - git config --global user.password ${{ secrets.auto_commit_pwd }} - - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - - mkdir -p deployment_artifacts - package-deployer join --dploy_workspace "$(pwd)/artifacts" - integration-pipeline get_image_list_from_manifest --manifest_platform_base_key product_components --docker_registry $REGISTRY - cp *.json deployment_artifacts - cp artifacts/merged.dploy deployment_artifacts/deployable.dploy - echo -e "$(cat ./artifacts/product.image.artifact)\n$(cat ./simulator.image.artifact)" > deployment_artifacts/product.image.artifact - - cp product.version deployment_artifacts - cp product-manifest.yaml deployment_artifacts - product_version=$(cat product.version) - - # danger zone. Everything will be deleted. - mv product-manifest.yaml product-manifest.yaml.bck - - git restore product.version - git restore product-manifest.yaml - git pull - echo "$product_version" > product.version - - git add product.version - git commit -m "[skip actions] Automatic Raise" - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare raise variables - id: pre_raise - run: | - echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT - - - name: Raise App version - uses: CasperWA/push-protected@v2.14.0 - with: - token: ${{ secrets.auto_commit_pwd }} - branch: ${{ steps.pre_raise.outputs.branch }} - unprotect_reviews: true - - - name: Github Publish - shell: bash - run: | - commit_hash=$(git log --format="%H" -n 1) - product_version=$(cat product.version) - gh release create -p --generate-notes --target "$commit_hash" -t "${{ inputs.product_name }} $product_version" $product_version - # add all files in the deployment_artifacts folder - find deployment_artifacts -type f -exec gh release upload $product_version {} \; - env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Update release notes + - name: Teardown remote vms (Proxmox) + working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} + if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash - run: | - # release version - product_version=$(cat product.version) - - # get existent release body - ORIGINAL_RN=$(gh release view "${product_version}" --json body | jq -r .body) - echo -e "ORIGINAL_RN:\n ${ORIGINAL_RN}" - - # get release PRs - PRS=$(echo "${ORIGINAL_RN}" | sed -rn "s/.* by @.* in https:\/\/github\.com\/${{ github.repository_owner }}\/${{ github.event.repository.name }}\/pull\/([0-9]+).*/\1/p" | tr '\n' ' ') - # change to array - PRS=($PRS) - echo "Found the following PRs: ${PRS[@]}" - - # new release notes file - rm -rf notes.txt - - # What's Changed - with info from PRs - echo "## What's Changed" >> notes.txt - - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs 
exist - echo "No relevant changes." >> notes.txt - else - # PRs exist - for pr in "${PRS[@]}"; do - gh pr view "${pr}" --json body | jq -r .body >> notes.txt - done - fi - echo "" >> notes.txt - - # PRs - echo "## PRs" >> notes.txt - if [ ${#PRS[@]} -eq 0 ]; then - # no PRs exist - echo "No PRs." >> notes.txt - else - # PRs exist - echo "${ORIGINAL_RN}" | grep "\* .* by @.* in https://github.com/${{ github.repository_owner }}/" >> notes.txt - fi - echo "" >> notes.txt - - ## Diff - echo "## Diff" >> notes.txt - echo "${ORIGINAL_RN}" | grep "\*\*Full Changelog\*\*" >> notes.txt - - # set new release notes - gh release edit "${product_version}" --notes-file notes.txt + run: terraform destroy -auto-approve env: - GITHUB_TOKEN: ${{ secrets.gh_token }} - - - name: Prepare slack variables - if: always() - id: pre_slack - run: | - MESSAGE=":white_check_mark: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is stable :sunny: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg=${MESSAGE}" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg }} - slack-update-message-ts: ${{ needs.Validations-Finish.outputs.slack_thread_id }} - - - - Run-Status: - runs-on: ubuntu-20.04 - needs: [publish, Validate-boostrap-configs] - if: ${{ always() && ( needs.publish.result == 'failure' || needs.publish.result == 'cancelled' || needs.publish.result == 'skipped') }} - steps: - - name: unstash raised_meta - uses: actions/download-artifact@v3 - with: - name: raised_meta - path: platform_configs - - - name: Copy product configs - shell: bash - run: | - cp ./platform_configs/product.version product.version - cp ./platform_configs/product-manifest.yaml product-manifest.yaml - - - name: Prepare slack variables - id: pre_slack - run: | - MESSAGE_ERR=":x: CI: ${GITHUB_REPOSITORY} (${GITHUB_REF#refs/heads/}), build: $(cat product.version) (Attempt: #${{ github.run_attempt }}) is unstable (or cancelled) :rain_cloud: Details: https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" - echo "msg_error=${MESSAGE_ERR}" >> $GITHUB_OUTPUT - - - name: Slack message - uses: archive/github-actions-slack@master - with: - slack-function: update-message - slack-bot-user-oauth-access-token: ${{ secrets.slack_token_id }} - slack-channel: ${{ env.SLACK_CHANNEL }} - slack-update-message-text: ${{ steps.pre_slack.outputs.msg_error }} - slack-update-message-ts: ${{ needs.Validate-boostrap-configs.outputs.slack_thread_id }} + TF_VAR_number_agents: ${{ inputs.fleet_number_members }} + TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" + TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} + TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} + TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} + TF_VAR_ip_list: ${{ inputs.fleet_ips }} From 5d4fc316f21ff76d0283bbfd7f44dd5a25cf3291 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Fri, 22 Mar 2024 17:36:28 +0000 Subject: [PATCH 38/78] remove old path --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 
fcf7f33c..0d365669 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -239,7 +239,7 @@ jobs: export PATH="$HOME/.local/bin:$PATH" integration-pipeline fetch_by_tag --repo $provision_infra_repo_name --version $provision_infra_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $provision_infra_dir ls -la $provision_infra_dir - echo "target_dir=${provision_infra_dir}/hosts/generic/" >> $GITHUB_OUTPUT + echo "target_dir=${provision_infra_dir}" >> $GITHUB_OUTPUT - name: Define Instance names id: infra_names From 2ab4d37d63393ff18486215d8ea5d554207e0381 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 16:37:54 +0000 Subject: [PATCH 39/78] use conf envs --- .../workflows/integration-build-platform.yml | 86 +++++++++++-------- 1 file changed, 51 insertions(+), 35 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 0d365669..220faae7 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -43,9 +43,9 @@ on: required: true ssh_pem_fleet_aws_vm: required: true - proxmox_api_token_id: + proxmox_ve_username: required: true - proxmox_api_token_secret: + proxmox_ve_password: required: true jira_username: required: true @@ -227,12 +227,26 @@ jobs: echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list sudo apt update && sudo apt install terraform -y + - name: Setup infra environment configs + id: infra_env_configs_setup + shell: bash + run: | + env_configs_dir=infra_env_configs + env_configs_version=0.0.0-1 + env_configs_repo_name=devops-tf-env-conf + + rm -rf $env_configs_dir + export PATH="$HOME/.local/bin:$PATH" + integration-pipeline fetch_by_tag --repo $env_configs_repo_name --version $env_configs_version --gh_api_user $GITHUB_API_USR --gh_api_pwd ${{ secrets.auto_commit_pwd }} --target_dir $env_configs_dir + ls -la $env_configs_dir + echo "target_dir=${env_configs_dir}" >> $GITHUB_OUTPUT + - name: Setup terraform proxmox provisioner id: provision_infra_setup shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.0-10 + provision_infra_version=0.0.2-0 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir @@ -259,31 +273,24 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | + multiply_node=$(printf '"hel",%.0s' {1..${{ inputs.fleet_number_members }}}) + node_list_str=${multiply_node::-1} + + var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + + var_args='-var='proxmox_host_list=[$node_list_str]'' \ + -var='fleet_peer_nr=${{ inputs.fleet_number_members }}' \ + -var='fleet_manager_name=\"${{ steps.infra_names.outputs.manager_prefix }}\"' \ + -var='fleet_peer_name_prefix=\"${{ steps.infra_names.outputs.worker_prefix }}\"' \ + -var='ip_list=${{ inputs.fleet_ips }} + -var='proxmox_ve_username'=${{ secrets.proxmox_ve_username }}' \ + -var='proxmox_ve_password'=${{ secrets.proxmox_ve_password }}' + + echo "Var args: $var_args" + terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" - terraform plan - terraform apply -auto-approve - terraform refresh - env: - TF_VAR_number_agents: ${{ 
inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} - TF_VAR_proxmox_host: "hel" - TF_VAR_vm_gateway: "172.22.0.1" - TF_VAR_ip_mask: 24 - TF_VAR_bios: "seabios" - TF_VAR_pool: "IP-Temp-VMs" - TF_VAR_tags: "ip-fleet" - - TF_VAR_fleet_hosts_user: "devops" - TF_VAR_template_name: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_manager_name: ${{ steps.infra_names.outputs.manager_prefix }} - TF_VAR_fleet_manager_memory: 8192 - TF_VAR_template_name_no_gpu: "ubuntu-2004-cloudinit-template2" - TF_VAR_fleet_worker_name_prefix: ${{ steps.infra_names.outputs.worker_prefix }} - TF_VAR_fleet_worker_memory: 8192 + terraform apply -auto-approve $var_file_arg $var_args + terraform refresh $var_file_arg $var_args - name: Prepare Devops provisioning slack message if: always() @@ -548,12 +555,21 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash - run: terraform destroy -auto-approve - env: - TF_VAR_number_agents: ${{ inputs.fleet_number_members }} - TF_VAR_proxmox_api_url: "https://hel.mov.ai:8006/api2/json" - TF_VAR_proxmox_api_token_id: ${{ secrets.proxmox_api_token_id }} - TF_VAR_proxmox_api_token_secret: ${{ secrets.proxmox_api_token_secret }} - TF_VAR_provision_ssh_pem: ${{ secrets.ssh_pem_fleet_aws_vm }} - TF_VAR_ip_list: ${{ inputs.fleet_ips }} + run: | + multiply_node=$(printf '"hel",%.0s' {1..${{ inputs.fleet_number_members }}}) + node_list_str=${multiply_node::-1} + + var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + + var_args='-var='proxmox_host_list=[$node_list_str]'' \ + -var='fleet_peer_nr=${{ inputs.fleet_number_members }}' \ + -var='fleet_manager_name=\"${{ steps.infra_names.outputs.manager_prefix }}\"' \ + -var='fleet_peer_name_prefix=\"${{ steps.infra_names.outputs.worker_prefix }}\"' \ + -var='ip_list=${{ inputs.fleet_ips }} + -var='proxmox_ve_username'=${{ secrets.proxmox_ve_username }}' \ + -var='proxmox_ve_password'=${{ secrets.proxmox_ve_password }}' + + echo "Var args: $var_args" + terraform destroy -auto-approve $var_file_arg $var_args + From 07e0d6221f29e19c9fb967c5a0922f904dd7b998 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 17:29:42 +0000 Subject: [PATCH 40/78] lets try --- .../workflows/integration-build-platform.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 220faae7..7298f9b2 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -273,18 +273,20 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - multiply_node=$(printf '"hel",%.0s' {1..${{ inputs.fleet_number_members }}}) + total_resources=${{ inputs.fleet_number_members }} + ((total_resources+=1)) + multiply_node=$(printf '"hel",%.0s' {1..total_resources}) node_list_str=${multiply_node::-1} var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - 
var_args='-var='proxmox_host_list=[$node_list_str]'' \ - -var='fleet_peer_nr=${{ inputs.fleet_number_members }}' \ - -var='fleet_manager_name=\"${{ steps.infra_names.outputs.manager_prefix }}\"' \ - -var='fleet_peer_name_prefix=\"${{ steps.infra_names.outputs.worker_prefix }}\"' \ - -var='ip_list=${{ inputs.fleet_ips }} - -var='proxmox_ve_username'=${{ secrets.proxmox_ve_username }}' \ - -var='proxmox_ve_password'=${{ secrets.proxmox_ve_password }}' + var_args="-var='proxmox_host_list=[$node_list_str]'" \ + var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" + var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" + var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" + var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" + var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" + var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" echo "Var args: $var_args" From bfda7dd7cc552c8197d85876eb2c629931d0a5ed Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 17:30:18 +0000 Subject: [PATCH 41/78] lets try --- .../workflows/integration-build-platform.yml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 7298f9b2..d7ab93bc 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -558,20 +558,23 @@ jobs: if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash run: | - multiply_node=$(printf '"hel",%.0s' {1..${{ inputs.fleet_number_members }}}) + total_resources=${{ inputs.fleet_number_members }} + ((total_resources+=1)) + multiply_node=$(printf '"hel",%.0s' {1..total_resources}) node_list_str=${multiply_node::-1} var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - var_args='-var='proxmox_host_list=[$node_list_str]'' \ - -var='fleet_peer_nr=${{ inputs.fleet_number_members }}' \ - -var='fleet_manager_name=\"${{ steps.infra_names.outputs.manager_prefix }}\"' \ - -var='fleet_peer_name_prefix=\"${{ steps.infra_names.outputs.worker_prefix }}\"' \ - -var='ip_list=${{ inputs.fleet_ips }} - -var='proxmox_ve_username'=${{ secrets.proxmox_ve_username }}' \ - -var='proxmox_ve_password'=${{ secrets.proxmox_ve_password }}' + var_args="-var='proxmox_host_list=[$node_list_str]'" \ + var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" + var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" + var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" + var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" + var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" + var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" echo "Var args: $var_args" + terraform destroy -auto-approve $var_file_arg $var_args From 02071809ec208fb397c536d425fa43bf8047236a Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 17:34:51 +0000 Subject: [PATCH 42/78] fix bash --- .github/workflows/integration-build-platform.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index d7ab93bc..71fc86c6 
100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -278,7 +278,7 @@ jobs: multiply_node=$(printf '"hel",%.0s' {1..total_resources}) node_list_str=${multiply_node::-1} - var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" @@ -563,7 +563,7 @@ jobs: multiply_node=$(printf '"hel",%.0s' {1..total_resources}) node_list_str=${multiply_node::-1} - var_file_arg = '-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" From 909ee32a63bf949bd3aa73d8de42562ca4f69cbe Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 17:46:59 +0000 Subject: [PATCH 43/78] another one --- .github/workflows/integration-build-platform.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 71fc86c6..a08be040 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -275,10 +275,10 @@ jobs: run: | total_resources=${{ inputs.fleet_number_members }} ((total_resources+=1)) - multiply_node=$(printf '"hel",%.0s' {1..total_resources}) + multiply_node=$(printf '"hel",%.0s' {1..$total_resources}) node_list_str=${multiply_node::-1} - var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" @@ -560,10 +560,10 @@ jobs: run: | total_resources=${{ inputs.fleet_number_members }} ((total_resources+=1)) - multiply_node=$(printf '"hel",%.0s' {1..total_resources}) + multiply_node=$(printf '"hel",%.0s' {1..$total_resources}) node_list_str=${multiply_node::-1} - var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" From 102d9835ff41aac7dae851726b1661920af0345c Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:08:55 +0000 Subject: [PATCH 44/78] fix i hope --- .github/workflows/integration-build-platform.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index a08be040..7f05f174 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -265,20 +265,21 @@ jobs: local_worker_prefix="ip-$branch-worker" echo "$local_manager_prefix" echo "$local_worker_prefix" + total_resources=${{ inputs.fleet_number_members }} + 
((total_resources+=1)) echo "manager_prefix=${local_manager_prefix}" >> $GITHUB_OUTPUT echo "worker_prefix=${local_worker_prefix}" >> $GITHUB_OUTPUT + echo "total_resources=${total_resources}" >> $GITHUB_OUTPUT - name: Provision remote vms (Proxmox) working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - total_resources=${{ inputs.fleet_number_members }} - ((total_resources+=1)) - multiply_node=$(printf '"hel",%.0s' {1..$total_resources}) + multiply_node=$(printf '"hel",%.0s' {1${{ steps.infra_names.outputs.total_resources }}}) node_list_str=${multiply_node::-1} - var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" @@ -563,7 +564,7 @@ jobs: multiply_node=$(printf '"hel",%.0s' {1..$total_resources}) node_list_str=${multiply_node::-1} - var_file_arg='-var-file="../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' var_args="-var='proxmox_host_list=[$node_list_str]'" \ var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" From 38e72394a4a9e54bfba77fc87368f1ee6707660d Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:09:36 +0000 Subject: [PATCH 45/78] fix i hope --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 7f05f174..30f85868 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -290,7 +290,7 @@ jobs: var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" echo "Var args: $var_args" - + echo "File args: $var_file_arg" terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" terraform apply -auto-approve $var_file_arg $var_args terraform refresh $var_file_arg $var_args From 4f161e11ad197ea39563633dc90d2b0242ad1104 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:13:12 +0000 Subject: [PATCH 46/78] fix i hope --- .github/workflows/integration-build-platform.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 30f85868..0cd2f802 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -281,13 +281,13 @@ jobs: var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - var_args="-var='proxmox_host_list=[$node_list_str]'" \ - var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" - var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" - var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" - var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" - var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" - var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" + 
var_args="-var=proxmox_host_list=[$node_list_str]" \ + var_args+=" -var=fleet_peer_nr=${{ inputs.fleet_number_members }}" + var_args+=" -var=fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}" + var_args+=" -var=fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}" + var_args+=" -var=ip_list=${{ inputs.fleet_ips }}" + var_args+=" -var=proxmox_ve_username=${{ secrets.proxmox_ve_username }}" + var_args+=" -var=proxmox_ve_password=${{ secrets.proxmox_ve_password }}" echo "Var args: $var_args" echo "File args: $var_file_arg" From d7eeb1b27e613523b4cf91210902c898db120641 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:17:22 +0000 Subject: [PATCH 47/78] fix i hope --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 0cd2f802..04d0298c 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-0 + provision_infra_version=0.0.2-1 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From c1c4dfc655d86931051e75a8225185b6e15297ef Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:20:38 +0000 Subject: [PATCH 48/78] fix i hope --- .github/workflows/integration-build-platform.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 04d0298c..98822665 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -281,13 +281,13 @@ jobs: var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - var_args="-var=proxmox_host_list=[$node_list_str]" \ - var_args+=" -var=fleet_peer_nr=${{ inputs.fleet_number_members }}" - var_args+=" -var=fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}" - var_args+=" -var=fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}" - var_args+=" -var=ip_list=${{ inputs.fleet_ips }}" - var_args+=" -var=proxmox_ve_username=${{ secrets.proxmox_ve_username }}" - var_args+=" -var=proxmox_ve_password=${{ secrets.proxmox_ve_password }}" + var_args="-var='proxmox_host_list=[$node_list_str]'" \ + var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" + var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" + var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" + var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" + var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" + var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" echo "Var args: $var_args" echo "File args: $var_file_arg" From c9bc7ba034bc7cd860b63b7aafc49274324257e2 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Tue, 26 Mar 2024 18:24:50 +0000 Subject: [PATCH 49/78] fix i hope --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 98822665..20ad353b 100644 --- a/.github/workflows/integration-build-platform.yml +++ 
b/.github/workflows/integration-build-platform.yml @@ -281,7 +281,7 @@ jobs: var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' - var_args="-var='proxmox_host_list=[$node_list_str]'" \ + var_args="-var='proxmox_host_list=[$node_list_str]'" var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" From 75b6e37292403ae70e2a0deff430b1b6928b7dc5 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 10:07:27 +0000 Subject: [PATCH 50/78] fix i hope --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 20ad353b..55e43042 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -278,7 +278,7 @@ jobs: run: | multiply_node=$(printf '"hel",%.0s' {1${{ steps.infra_names.outputs.total_resources }}}) node_list_str=${multiply_node::-1} - + set -x var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' var_args="-var='proxmox_host_list=[$node_list_str]'" From f6a980cbb1e6d269d849333293208bcea079b499 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 11:29:39 +0000 Subject: [PATCH 51/78] have vars as input --- .../workflows/integration-build-platform.yml | 40 ++++++------------- 1 file changed, 12 insertions(+), 28 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 55e43042..59980df1 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -280,20 +280,19 @@ jobs: node_list_str=${multiply_node::-1} set -x var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + echo "proxmox_host_list=[$node_list_str]">>input.tfvars + echo "fleet_peer_nr=${{ inputs.fleet_number_members }}">>input.tfvars + echo "fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}">>input.tfvars + echo "fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}">>input.tfvars + echo "ip_list=${{ inputs.fleet_ips }}">>input.tfvars + echo "proxmox_ve_username=${{ secrets.proxmox_ve_username }}">>input.tfvars + echo "proxmox_ve_password=${{ secrets.proxmox_ve_password }}">>input.tfvars - var_args="-var='proxmox_host_list=[$node_list_str]'" - var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" - var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" - var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" - var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" - var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" - var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" - - echo "Var args: $var_args" echo "File args: $var_file_arg" + echo "Input File args: $(cat input.tfvars)" terraform init -backend-config="key=hel-fleet-${{ steps.infra_names.outputs.manager_prefix }}.tfstate" - terraform apply -auto-approve $var_file_arg $var_args - terraform refresh $var_file_arg $var_args + terraform apply -auto-approve $var_file_arg 
-var-file=input.tfvars + terraform refresh $var_file_arg -var-file=input.tfvars - name: Prepare Devops provisioning slack message if: always() @@ -559,23 +558,8 @@ jobs: if: ${{ ( !inputs.debug_fleet_keep_alive && success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash run: | - total_resources=${{ inputs.fleet_number_members }} - ((total_resources+=1)) - multiply_node=$(printf '"hel",%.0s' {1..$total_resources}) - node_list_str=${multiply_node::-1} - var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars"' - - var_args="-var='proxmox_host_list=[$node_list_str]'" \ - var_args+=" -var='fleet_peer_nr=${{ inputs.fleet_number_members }}'" - var_args+=" -var='fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}'" - var_args+=" -var='fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}'" - var_args+=" -var='ip_list=${{ inputs.fleet_ips }}'" - var_args+=" -var='proxmox_ve_username=${{ secrets.proxmox_ve_username }}'" - var_args+=" -var='proxmox_ve_password=${{ secrets.proxmox_ve_password }}'" - - echo "Var args: $var_args" - - terraform destroy -auto-approve $var_file_arg $var_args + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + terraform destroy -auto-approve $var_file_arg -var-file=input.tfvars From 72a0417c3e5b95c5a1126960721e68511dfa73f7 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 12:37:49 +0000 Subject: [PATCH 52/78] have vars as input --- .github/workflows/integration-build-platform.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 59980df1..332f0879 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -280,13 +280,15 @@ jobs: node_list_str=${multiply_node::-1} set -x var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' + echo "proxmox_host_list=[$node_list_str]">>input.tfvars echo "fleet_peer_nr=${{ inputs.fleet_number_members }}">>input.tfvars echo "fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}">>input.tfvars echo "fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}">>input.tfvars - echo "ip_list=${{ inputs.fleet_ips }}">>input.tfvars + echo 'ip_list=${{ inputs.fleet_ips }}'>>input.tfvars echo "proxmox_ve_username=${{ secrets.proxmox_ve_username }}">>input.tfvars echo "proxmox_ve_password=${{ secrets.proxmox_ve_password }}">>input.tfvars + echo "">>input echo "File args: $var_file_arg" echo "Input File args: $(cat input.tfvars)" From 0fdb61a2be0d5cd14ab477fa391fdbb0c89d3240 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 12:46:04 +0000 Subject: [PATCH 53/78] have vars as input --- .github/workflows/integration-build-platform.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 332f0879..57596147 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -286,8 +286,8 @@ jobs: echo "fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}">>input.tfvars echo "fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}">>input.tfvars echo 'ip_list=${{ 
inputs.fleet_ips }}'>>input.tfvars - echo "proxmox_ve_username=${{ secrets.proxmox_ve_username }}">>input.tfvars - echo "proxmox_ve_password=${{ secrets.proxmox_ve_password }}">>input.tfvars + echo 'proxmox_ve_username=${{ secrets.proxmox_ve_username }}'>>input.tfvars + echo 'proxmox_ve_password=${{ secrets.proxmox_ve_password }}'>>input.tfvars echo "">>input echo "File args: $var_file_arg" From 56847c818ac2011cf154c4bb9adebcaa7398b02e Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 12:54:41 +0000 Subject: [PATCH 54/78] have vars as input --- .github/workflows/integration-build-platform.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 57596147..4c74feca 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -286,9 +286,9 @@ jobs: echo "fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}">>input.tfvars echo "fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}">>input.tfvars echo 'ip_list=${{ inputs.fleet_ips }}'>>input.tfvars - echo 'proxmox_ve_username=${{ secrets.proxmox_ve_username }}'>>input.tfvars - echo 'proxmox_ve_password=${{ secrets.proxmox_ve_password }}'>>input.tfvars - echo "">>input + echo 'proxmox_ve_username="${{ secrets.proxmox_ve_username }}"'>>input.tfvars + echo 'proxmox_ve_password="${{ secrets.proxmox_ve_password }}"'>>input.tfvars + echo "\n">>input echo "File args: $var_file_arg" echo "Input File args: $(cat input.tfvars)" From 1dca27c8277aeb88a07a951ccb4dc16a861f43a6 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 12:58:58 +0000 Subject: [PATCH 55/78] have vars as input --- .github/workflows/integration-build-platform.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 4c74feca..8539fa53 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -283,8 +283,8 @@ jobs: echo "proxmox_host_list=[$node_list_str]">>input.tfvars echo "fleet_peer_nr=${{ inputs.fleet_number_members }}">>input.tfvars - echo "fleet_manager_name=${{ steps.infra_names.outputs.manager_prefix }}">>input.tfvars - echo "fleet_peer_name_prefix=${{ steps.infra_names.outputs.worker_prefix }}">>input.tfvars + echo 'fleet_manager_name="${{ steps.infra_names.outputs.manager_prefix }}"'>>input.tfvars + echo 'fleet_peer_name_prefix="${{ steps.infra_names.outputs.worker_prefix }}"'>>input.tfvars echo 'ip_list=${{ inputs.fleet_ips }}'>>input.tfvars echo 'proxmox_ve_username="${{ secrets.proxmox_ve_username }}"'>>input.tfvars echo 'proxmox_ve_password="${{ secrets.proxmox_ve_password }}"'>>input.tfvars From 7cb5f0fe79cb8081f2a0a9fe9efce6040d0ecc36 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:01:55 +0000 Subject: [PATCH 56/78] have vars as input --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 8539fa53..b974f492 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-1 + 
provision_infra_version=0.0.2-2 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From d666ee3a55e6b43e0d94456df5c0968d07f15343 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:11:14 +0000 Subject: [PATCH 57/78] have vars as input --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index b974f492..3e931577 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-2 + provision_infra_version=0.0.2-3 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From 850cd6c712b46f4a4a0c292aa267c9e2161c9432 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:17:19 +0000 Subject: [PATCH 58/78] have vars as input --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 3e931577..95dbac7c 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -276,7 +276,7 @@ jobs: working-directory: ${{ steps.provision_infra_setup.outputs.target_dir }} shell: bash run: | - multiply_node=$(printf '"hel",%.0s' {1${{ steps.infra_names.outputs.total_resources }}}) + multiply_node=$(printf '"hel",%.0s' {1..${{ steps.infra_names.outputs.total_resources }}}) node_list_str=${multiply_node::-1} set -x var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' From 0178f02d4875ffbdcd04e23b3553d645b49a0516 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:20:17 +0000 Subject: [PATCH 59/78] have vars as input --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 95dbac7c..158a5932 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-3 + provision_infra_version=0.0.2-4 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From 3403e78e26c8ed9149b082dafa04a88ff604be88 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:27:51 +0000 Subject: [PATCH 60/78] bump env conf repo --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 158a5932..4d51b1d8 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -232,7 +232,7 @@ jobs: shell: bash run: | env_configs_dir=infra_env_configs - env_configs_version=0.0.0-1 + env_configs_version=0.0.0-2 env_configs_repo_name=devops-tf-env-conf rm -rf $env_configs_dir From 4aba221caaf82735670776932bc8e23e363f9b26 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:31:59 +0000 Subject: [PATCH 61/78] adapt to new inventory filename 
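The bpg-based provisioner now writes its Ansible inventory to provisioned_inventory.yml instead of the old hosts file, so the copy into staging/ and every `-i` argument passed to ansible and ansible-playbook follow the new name, while the manager address is still resolved from the key fleet.children.managers.hosts.manager.ansible_host. Below is a minimal bash sketch of the inventory shape that key implies, together with the same lookup done with plain PyYAML; the workers group name, the member host names and the addresses are illustrative assumptions, not values confirmed by the provisioner.

# Sketch of the assumed provisioned_inventory.yml layout (group/host names and IPs are placeholders).
cat > provisioned_inventory.yml <<'EOF'
fleet:
  children:
    managers:
      hosts:
        manager:
          ansible_host: 172.22.0.10
    workers:
      hosts:
        member0:
          ansible_host: 172.22.0.11
        member1:
          ansible_host: 172.22.0.12
EOF

# Same key path the workflow resolves with integration-pipeline get_yml_value,
# reproduced here with PyYAML purely for illustration.
python3 - <<'EOF'
import yaml  # PyYAML, assumed to be available on the runner
with open("provisioned_inventory.yml") as f:
    inventory = yaml.safe_load(f)
print(inventory["fleet"]["children"]["managers"]["hosts"]["manager"]["ansible_host"])
EOF

In the workflow itself, integration-pipeline get_yml_value performs this lookup and writes the result to staging/manager_private_ip.txt for the later steps.
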
--- .github/workflows/integration-build-platform.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 4d51b1d8..a37cda4b 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -317,9 +317,9 @@ jobs: - name: Apply ansible inventory shell: bash run: | - cp ${{ steps.provision_infra_setup.outputs.target_dir }}/hosts staging/hosts + cp ${{ steps.provision_infra_setup.outputs.target_dir }}/provisioned_inventory.yml staging/provisioned_inventory.yml export PATH="$HOME/.local/bin:$PATH" - integration-pipeline get_yml_value --file staging/hosts --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt + integration-pipeline get_yml_value --file staging/provisioned_inventory.yml --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt - name: Setup ansible installation id: ansible_install_setup @@ -394,7 +394,7 @@ jobs: done ansible-playbook install.yml \ - -i ../staging/hosts \ + -i ../staging/provisioned_inventory.yml \ --key-file ~/.ssh/aws_slave.pem \ --extra-vars=@"$(pwd)/.."/product-manifest.yaml \ -e fleet_domain_dns="" \ @@ -529,13 +529,13 @@ jobs: source ansible-venv/bin/activate # install fleet_tests artifacts for fleet_host in "manager" "member0" "member1"; do - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u movai-service --since "1hour ago"' > fleet_qa_artifacts/install/$fleet_host.log || true echo "From $fleet_host:" - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'docker ps -a' > fleet_qa_artifacts/install/$fleet_host-docker_ps.log || true echo "$(tail -n +2 fleet_qa_artifacts/install/$fleet_host-docker_ps.log )" - ansible $fleet_host -i ../staging/hosts --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'journalctl -u docker --boot --lines=all' > fleet_qa_artifacts/install/$fleet_host-all-docker.log || true done deactivate From 2db2637c66f1f5a14524a26431fe854fad14bca7 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 13:43:52 +0000 Subject: [PATCH 62/78] bump env confs --- .github/workflows/integration-build-platform.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index a37cda4b..85952f5c 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -232,7 +232,7 @@ jobs: shell: bash run: | env_configs_dir=infra_env_configs - env_configs_version=0.0.0-2 + env_configs_version=0.0.0-3 env_configs_repo_name=devops-tf-env-conf rm -rf $env_configs_dir @@ -560,7 +560,7 @@ jobs: if: ${{ ( !inputs.debug_fleet_keep_alive && 
success() ) || cancelled() || ( !inputs.debug_fleet_keep_alive && failure() ) }} shell: bash run: | - + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' terraform destroy -auto-approve $var_file_arg -var-file=input.tfvars From 9d8016d1f4fbdac248ce2227c40bffa4374b22b0 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 14:19:59 +0000 Subject: [PATCH 63/78] bump tf version --- .github/workflows/integration-build-platform.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 85952f5c..01ec128c 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-4 + provision_infra_version=0.0.2-5 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir @@ -318,6 +318,7 @@ jobs: shell: bash run: | cp ${{ steps.provision_infra_setup.outputs.target_dir }}/provisioned_inventory.yml staging/provisioned_inventory.yml + cat staging/provisioned_inventory.yml export PATH="$HOME/.local/bin:$PATH" integration-pipeline get_yml_value --file staging/provisioned_inventory.yml --key fleet.children.managers.hosts.manager.ansible_host --output_file ./staging/manager_private_ip.txt From c938320b3954a1901b1184124e98e2b189e49629 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 14:38:55 +0000 Subject: [PATCH 64/78] switch ensuring vm is ready mechanism --- .../workflows/integration-build-platform.yml | 35 +++++-------------- 1 file changed, 8 insertions(+), 27 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 01ec128c..43f8f0c0 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -348,29 +348,6 @@ jobs: shell: bash run: | - function ensure_agent_up(){ - vm_ip=$1 - i="0" - max=15 - success=1 - while [ $success -ne 0 ] - do - echo "Checking if $vm_ip is reachable ($i/$max)" - ping -c1 $vm_ip &>/dev/null - success=$? - - if [ $i -lt $max ] - then - i=$[$i+1] - else - echo "Timeout waiting for $vm_ip" - exit 2 - fi - - sleep 2 - done - - } echo "${{ secrets.ssh_pem_fleet_aws_vm }}" > ~/.ssh/aws_slave.pem sudo chmod 600 ~/.ssh/aws_slave.pem while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ; do echo Waiting for other software managers to finish... 
; sleep 5;done @@ -384,14 +361,18 @@ jobs: touch ~/.ssh/known_hosts sudo chmod 600 ~/.ssh/known_hosts IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips - manager_ip=${stripped_ips_arr[0]} - echo $manager_ip - echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT + for ip in "${stripped_ips_arr[@]}" do - ensure_agent_up $ip + if [[ $ip == *"/"* ]]; then + ip=${ip%/*} + fi ssh-keygen -f ~/.ssh/known_hosts -R $ip ssh-keyscan -H $ip >> ~/.ssh/known_hosts + ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' + set -e + cloud-init status --wait + ' done ansible-playbook install.yml \ From ff2fac63f421a9fb6d3bfe05189f86eee5f40f25 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 14:58:08 +0000 Subject: [PATCH 65/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 43f8f0c0..d537d2aa 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -372,6 +372,9 @@ jobs: ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' set -e cloud-init status --wait + # ensure all file descriptors are set to blocking + python3 -c 'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)' + ' done From 4b1bfa83b17100ca0e8ffa66587d19bc1f2a9752 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:03:30 +0000 Subject: [PATCH 66/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index d537d2aa..cc3721d6 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -373,7 +373,7 @@ jobs: set -e cloud-init status --wait # ensure all file descriptors are set to blocking - python3 -c 'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)' + python3 -c \'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)\' ' done From e0f1edf74cedd3d079205326781f2b27d7f05bcb Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:13:54 +0000 Subject: [PATCH 67/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index cc3721d6..9592b6f3 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -373,9 +373,8 @@ jobs: set -e cloud-init status --wait # ensure all file descriptors are set to blocking - python3 -c \'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)\' - - ' + python3 -c '\''import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); 
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)'\' + ' done ansible-playbook install.yml \ From 2e391aef9a46acf9a7c729f2552e7d95f320f5ae Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:15:09 +0000 Subject: [PATCH 68/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 9592b6f3..d37c3b33 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -373,8 +373,7 @@ jobs: set -e cloud-init status --wait # ensure all file descriptors are set to blocking - python3 -c '\''import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)'\' - ' + python3 -c "import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)" ' done ansible-playbook install.yml \ From 8831f92bc5e3b4191a976e18d504a5bfe2be6f59 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:24:15 +0000 Subject: [PATCH 69/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index d37c3b33..7d695de4 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -372,10 +372,11 @@ jobs: ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' set -e cloud-init status --wait - # ensure all file descriptors are set to blocking - python3 -c "import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)" ' done + # ensure all file descriptors are set to blocking + python3 -c "import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)" ' + ansible-playbook install.yml \ -i ../staging/provisioned_inventory.yml \ --key-file ~/.ssh/aws_slave.pem \ From 5e90119536fb9ad64b139443400af4331bc4cbe5 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:29:20 +0000 Subject: [PATCH 70/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 7d695de4..1b7e2f47 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -372,6 +372,7 @@ jobs: ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' set -e cloud-init status --wait + ' done # ensure all file descriptors are set to blocking From 3eb97ad04fecf64066daefc6c61238628624e298 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 15:47:37 +0000 Subject: [PATCH 71/78] attempt to force file descriptors blocking --- .github/workflows/integration-build-platform.yml | 5 +---- 1 file changed, 1 
insertion(+), 4 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 1b7e2f47..ae3a9190 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -374,10 +374,7 @@ jobs: cloud-init status --wait ' done - - # ensure all file descriptors are set to blocking - python3 -c "import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)" ' - + python3 -c 'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)' ansible-playbook install.yml \ -i ../staging/provisioned_inventory.yml \ --key-file ~/.ssh/aws_slave.pem \ From 690e5fcead66df6c4f4d753d89894a90e6041690 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 16:05:04 +0000 Subject: [PATCH 72/78] bump tf --- .github/workflows/integration-build-platform.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index ae3a9190..675bea2a 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-5 + provision_infra_version=0.0.2-6 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From cc9360d399f2569970c017fca5db8f6394e89bd6 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Wed, 27 Mar 2024 16:27:16 +0000 Subject: [PATCH 73/78] fix manager ip exposal --- .github/workflows/integration-build-platform.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 675bea2a..0f4d5611 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -362,6 +362,10 @@ jobs: sudo chmod 600 ~/.ssh/known_hosts IFS=',' read -r -a stripped_ips_arr <<< $stripped_ips + manager_ip=${stripped_ips_arr[0]} + echo $manager_ip + echo "manager_ip=${manager_ip}" >> $GITHUB_OUTPUT + for ip in "${stripped_ips_arr[@]}" do if [[ $ip == *"/"* ]]; then From 3f82771474474d811f68950ef20b55276ccc1198 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Thu, 28 Mar 2024 11:08:24 +0000 Subject: [PATCH 74/78] try again without file descriptor hammer --- .github/workflows/integration-build-platform.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 0f4d5611..293087cf 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -278,7 +278,7 @@ jobs: run: | multiply_node=$(printf '"hel",%.0s' {1..${{ steps.infra_names.outputs.total_resources }}}) node_list_str=${multiply_node::-1} - set -x + var_file_arg='-var-file=../${{ steps.infra_env_configs_setup.outputs.target_dir }}/hel/hel_fleet_test.tfvars' echo "proxmox_host_list=[$node_list_str]">>input.tfvars @@ -378,7 +378,6 @@ jobs: cloud-init status --wait ' done - python3 -c 'import fcntl, sys; fcntl.fcntl(sys.stdin.fileno(), fcntl.F_SETFL, 0); fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, 0); 
fcntl.fcntl(sys.stderr.fileno(), fcntl.F_SETFL, 0)' ansible-playbook install.yml \ -i ../staging/provisioned_inventory.yml \ --key-file ~/.ssh/aws_slave.pem \ From 31bd6f0b6aaf35aaa28828609db6dff15fe8858e Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Thu, 28 Mar 2024 11:19:23 +0000 Subject: [PATCH 75/78] attempt not using the ssh call --- .github/workflows/integration-build-platform.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 293087cf..59363398 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -373,10 +373,10 @@ jobs: fi ssh-keygen -f ~/.ssh/known_hosts -R $ip ssh-keyscan -H $ip >> ~/.ssh/known_hosts - ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' - set -e - cloud-init status --wait - ' + #ssh devops@${ip} -i ~/.ssh/aws_slave.pem -o StrictHostKeyChecking=no ' + # set -e + # cloud-init status --wait + #' done ansible-playbook install.yml \ -i ../staging/provisioned_inventory.yml \ From ac35b4a53b97cc2e346e0a48b372514d6048a439 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Thu, 28 Mar 2024 11:43:05 +0000 Subject: [PATCH 76/78] swtich from ssh to ansible to see descriptor complain --- .github/workflows/integration-build-platform.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index 59363398..ccafaf00 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -378,6 +378,13 @@ jobs: # cloud-init status --wait #' done + + # Ensure cloud init is done on all the hosts + for fleet_host in "manager" "member0" "member1"; do + ansible $fleet_host -i ../staging/provisioned_inventory.yml --key-file ~/.ssh/aws_slave.pem -m shell -a 'cloud-init status --wait' + done + + # Start the installation ansible-playbook install.yml \ -i ../staging/provisioned_inventory.yml \ --key-file ~/.ssh/aws_slave.pem \ From 89a9198cb4046b78de5827be73dcde9356decdf8 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Thu, 28 Mar 2024 18:30:41 +0000 Subject: [PATCH 77/78] lets roll --- .github/workflows/integration-build-platform.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index ccafaf00..b1c7b009 100644 --- a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -232,7 +232,7 @@ jobs: shell: bash run: | env_configs_dir=infra_env_configs - env_configs_version=0.0.0-3 + env_configs_version=0.0.1-2 env_configs_repo_name=devops-tf-env-conf rm -rf $env_configs_dir @@ -246,7 +246,7 @@ jobs: shell: bash run: | provision_infra_dir=provision_scripts - provision_infra_version=0.0.2-6 + provision_infra_version=1.0.0-2 provision_infra_repo_name=devops-tf-proxmox-bpg rm -rf $provision_infra_dir From cd9311d17c877c6ecd7fcfd1da131da9ee556560 Mon Sep 17 00:00:00 2001 From: duartecoelhomovai Date: Thu, 28 Mar 2024 18:37:29 +0000 Subject: [PATCH 78/78] lets roll --- .github/workflows/integration-build-platform.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/integration-build-platform.yml b/.github/workflows/integration-build-platform.yml index b1c7b009..9ac94bbd 100644 --- 
a/.github/workflows/integration-build-platform.yml +++ b/.github/workflows/integration-build-platform.yml @@ -284,6 +284,7 @@ jobs: echo "proxmox_host_list=[$node_list_str]">>input.tfvars echo "fleet_peer_nr=${{ inputs.fleet_number_members }}">>input.tfvars echo 'fleet_manager_name="${{ steps.infra_names.outputs.manager_prefix }}"'>>input.tfvars + echo 'fleet_password="n/a"'>>input.tfvars echo 'fleet_peer_name_prefix="${{ steps.infra_names.outputs.worker_prefix }}"'>>input.tfvars echo 'ip_list=${{ inputs.fleet_ips }}'>>input.tfvars echo 'proxmox_ve_username="${{ secrets.proxmox_ve_username }}"'>>input.tfvars
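
Read end to end, the series replaces the long list of TF_VAR_* environment variables with two var-files: the environment config fetched from devops-tf-env-conf (hel/hel_fleet_test.tfvars) and a per-run input.tfvars generated by the job, and it uses the same pair for apply, refresh and destroy. The bash sketch below condenses that final call shape; the branch-derived names, the node and IP lists and the credential values are placeholders standing in for the workflow inputs and secrets, not literal values from the pipeline.

# Condensed sketch of the provisioning call shape the series converges on.
var_file_arg='-var-file=../infra_env_configs/hel/hel_fleet_test.tfvars'

# Per-run values go into input.tfvars instead of being quoted on the CLI.
{
  echo 'proxmox_host_list=["hel","hel","hel"]'       # one node entry per VM: manager plus members
  echo 'fleet_peer_nr=2'
  echo 'fleet_manager_name="ip-mybranch-manager"'    # branch-derived prefix (placeholder)
  echo 'fleet_peer_name_prefix="ip-mybranch-worker"'
  echo 'fleet_password="n/a"'
  echo 'ip_list=["dhcp","dhcp","dhcp"]'              # stands in for the fleet_ips workflow input
  echo 'proxmox_ve_username="<proxmox user>"'        # from secrets.proxmox_ve_username
  echo 'proxmox_ve_password="<proxmox password>"'    # from secrets.proxmox_ve_password
} >> input.tfvars

terraform init -backend-config="key=hel-fleet-ip-mybranch-manager.tfstate"
terraform apply -auto-approve $var_file_arg -var-file=input.tfvars
terraform refresh $var_file_arg -var-file=input.tfvars
# ...and at teardown time, unless debug_fleet_keep_alive is set:
terraform destroy -auto-approve $var_file_arg -var-file=input.tfvars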