From b53e97a65eec6037b268736301b9158683d72375 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 5 Jun 2020 21:01:55 -0700 Subject: [PATCH 001/155] Added Google Compute support. Reorganized all Linux Packer scripts and files. --- packer/build-linux.json | 101 ------------ packer/build-rhel-6.json | 94 ----------- packer/build-rhel-7.json | 146 ++++++++++++++++++ packer/build-rhel-8.json | 145 +++++++++++++++++ packer/build-solaris-sparc.json | 74 --------- packer/build-solaris-x86.json | 74 --------- packer/build-ubuntu-16.04.json | 126 +++++++++++++++ packer/build-ubuntu-18.04.json | 126 +++++++++++++++ packer/build-ubuntu.json | 90 ----------- packer/default.json | 8 +- packer/dev-solaris-sparc.json | 38 ----- packer/dev-solaris-x86.json | 38 ----- packer/dev-windows-2012-r2.json | 76 --------- .../aws/files/usr/local/bin}/init-user.sh | 0 .../aws/setup-files.sh} | 7 +- .../aws}/wait-for-cloud-init.sh | 0 .../etc/systemd/system/update-hosts.service} | 19 ++- .../files/usr/local/bin/update-hosts.sh} | 11 +- packer/{rhel => linux}/install-cmake.sh | 4 +- .../install-geode.sh} | 11 +- .../install-cmake.sh => linux/setup-files.sh} | 11 +- packer/rhel-7-base.json | 48 ------ .../7/install-devtoolset.sh} | 6 +- packer/rhel/{ => aws}/add-user-build.sh | 0 .../files/etc/cloud/cloud.cfg.d/10_hosts.cfg | 0 .../cleanup.sh => rhel/aws/setup-files.sh} | 4 +- packer/rhel/install-coverage-tools.sh | 24 --- .../{install-repos.sh => install-jdk-11.sh} | 12 +- packer/rhel/install-scl-devtoolset.sh | 23 --- packer/rhel/install-scl-python27.sh | 24 --- packer/solaris/changepasswd | 37 ----- packer/solaris/install-build-tools.sh | 36 ----- packer/solaris/install-cmake.sh | 36 ----- packer/solaris/install-opencsw.sh | 27 ---- packer/solaris/install-solarisstudio.sh | 34 ---- packer/solaris/install-test-tools.sh | 26 ---- packer/solaris/update.sh | 22 --- packer/test-rhel-7.json | 97 ------------ packer/test-solaris-x86.json | 71 --------- packer/test-windows-2012-r2.json | 88 
----------- packer/test-windows-2012.json | 88 ----------- packer/ubuntu-16.04-base.json | 48 ------ packer/ubuntu/{ => aws}/add-user-build.sh | 0 packer/ubuntu/cleanup.sh | 1 - packer/ubuntu/install-clang-format.sh | 8 +- packer/ubuntu/install-gemfire.sh | 22 --- .../install-jdk-11.sh} | 7 +- packer/ubuntu/install-packages.sh | 33 ---- packer/ubuntu/wait-for-cloud-init.sh | 26 ---- 49 files changed, 606 insertions(+), 1441 deletions(-) delete mode 100644 packer/build-linux.json delete mode 100644 packer/build-rhel-6.json create mode 100644 packer/build-rhel-7.json create mode 100644 packer/build-rhel-8.json delete mode 100644 packer/build-solaris-sparc.json delete mode 100644 packer/build-solaris-x86.json create mode 100644 packer/build-ubuntu-16.04.json create mode 100644 packer/build-ubuntu-18.04.json delete mode 100644 packer/build-ubuntu.json delete mode 100644 packer/dev-solaris-sparc.json delete mode 100644 packer/dev-solaris-x86.json delete mode 100644 packer/dev-windows-2012-r2.json rename packer/{rhel/files/etc => linux/aws/files/usr/local/bin}/init-user.sh (100%) rename packer/{rhel/install-gemfire.sh => linux/aws/setup-files.sh} (91%) rename packer/{rhel => linux/aws}/wait-for-cloud-init.sh (100%) rename packer/{solaris/add-user-build.sh => linux/files/etc/systemd/system/update-hosts.service} (76%) rename packer/{solaris/install-gemfire.sh => linux/files/usr/local/bin/update-hosts.sh} (84%) mode change 100755 => 100644 rename packer/{rhel => linux}/install-cmake.sh (87%) rename packer/{rhel/install-jdk-1.8.sh => linux/install-geode.sh} (75%) rename packer/{ubuntu/install-cmake.sh => linux/setup-files.sh} (82%) delete mode 100644 packer/rhel-7-base.json rename packer/{ubuntu/install-coverage-tools.sh => rhel/7/install-devtoolset.sh} (85%) rename packer/rhel/{ => aws}/add-user-build.sh (100%) rename packer/rhel/{ => aws}/files/etc/cloud/cloud.cfg.d/10_hosts.cfg (100%) rename packer/{solaris/cleanup.sh => rhel/aws/setup-files.sh} (92%) mode change 100755 
=> 100644 delete mode 100644 packer/rhel/install-coverage-tools.sh rename packer/rhel/{install-repos.sh => install-jdk-11.sh} (77%) delete mode 100644 packer/rhel/install-scl-devtoolset.sh delete mode 100644 packer/rhel/install-scl-python27.sh delete mode 100755 packer/solaris/changepasswd delete mode 100755 packer/solaris/install-build-tools.sh delete mode 100644 packer/solaris/install-cmake.sh delete mode 100644 packer/solaris/install-opencsw.sh delete mode 100644 packer/solaris/install-solarisstudio.sh delete mode 100755 packer/solaris/install-test-tools.sh delete mode 100755 packer/solaris/update.sh delete mode 100644 packer/test-rhel-7.json delete mode 100644 packer/test-solaris-x86.json delete mode 100644 packer/test-windows-2012-r2.json delete mode 100644 packer/test-windows-2012.json delete mode 100644 packer/ubuntu-16.04-base.json rename packer/ubuntu/{ => aws}/add-user-build.sh (100%) delete mode 100644 packer/ubuntu/install-gemfire.sh rename packer/{rhel/install-build-rpms.sh => ubuntu/install-jdk-11.sh} (76%) delete mode 100644 packer/ubuntu/install-packages.sh delete mode 100644 packer/ubuntu/wait-for-cloud-init.sh diff --git a/packer/build-linux.json b/packer/build-linux.json deleted file mode 100644 index 7b47588fe8..0000000000 --- a/packer/build-linux.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"", - "image_name":"build-linux" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ec2-user", - "ssh_pty":true - } - ], - 
"provisioners":[ - { - "type":"shell", - "script":"rhel/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/update.sh" - ] - }, - { - "type":"file", - "source":"rhel/files", - "destination":"/tmp" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cp -rv /tmp/files/* /", - "rm -rf /tmp/files", - "chmod +x /etc/init-user.sh" - ] - }, - { - "type":"file", - "source":"rhel/init-hosts.rc.local", - "destination":"/tmp/init-hosts.rc.local" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cat /tmp/init-hosts.rc.local >> /etc/rc.local" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/disable-selinux.sh", - "rhel/add-user-build.sh", - "rhel/install-repos.sh", - "rhel/install-scl-devtoolset.sh", - "rhel/install-scl-python27.sh", - "rhel/install-build-rpms.sh", - "rhel/install-coverage-tools.sh", - "rhel/install-cmake.sh", - "rhel/install-jdk-1.8.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/install-gemfire.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/cleanup.sh" - ] - } - ] -} diff --git a/packer/build-rhel-6.json b/packer/build-rhel-6.json deleted file mode 100644 index cbd60045b2..0000000000 --- a/packer/build-rhel-6.json +++ /dev/null @@ -1,94 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"ami-80296ff8", - "source_image_name":"", - "image_name":"build-rhel6" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - 
"secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "launch_block_device_mappings":[ - { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_type": "gp2", - "volume_size":100 - } - ], - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ec2-user", - "ssh_pty":true - } - ], - "provisioners":[ - { - "type":"shell", - "script":"rhel/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/update.sh" - ] - }, - { - "type":"file", - "source":"rhel/files", - "destination":"/tmp" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cp -rv /tmp/files/* /", - "rm -rf /tmp/files", - "chmod +x /etc/init-user.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/disable-selinux.sh", - "rhel/add-user-build.sh", - "rhel/install-scl-devtoolset.sh", - "rhel/install-build-rpms.sh", - "rhel/install-cmake.sh", - "rhel/install-jdk-1.8.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/install-gemfire.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/cleanup.sh" - ] - } - ] -} diff --git a/packer/build-rhel-7.json b/packer/build-rhel-7.json new file mode 100644 index 0000000000..8efeaaefec --- /dev/null +++ b/packer/build-rhel-7.json @@ -0,0 +1,146 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "build-rhel-7" + }, + "builders": 
[ + { + "type": "amazon-ebs", + "instance_type": "t2.micro", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "ssh_username": "ec2-user", + "ssh_pty": true + }, + { + "type": "googlecompute", + "project_id": "{{user `googlecompute_project`}}", + "source_image": "{{user `source_image`}}", + "ssh_username": "packer", + "zone": "{{user `googlecompute_zone`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + } + } + ], + "provisioners": [ + { + "type": "shell", + "script": "linux/aws/wait-for-cloud-init.sh", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/update.sh" + ] + }, + { + "type": "file", + "source": "linux/files", + "destination": "/tmp" + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/setup-files.sh" + ] + }, + { + "type": "file", + "source": "linux/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "file", + "source": "rhel/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + 
"type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/disable-selinux.sh", + "rhel/7/install-devtoolset.sh", + "rhel/install-jdk-11.sh", + "linux/install-cmake.sh", + "linux/install-geode.sh" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "inline": [ + "yum install -y git make zlib-devel patch openssl-devel", + "yum install -y python3-pip", + "pip3 install cpp-coveralls", + "yum --enablerepo '*codeready*' install -y doxygen" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/aws/add-user-build.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/cleanup.sh" + ] + } + ] +} diff --git a/packer/build-rhel-8.json b/packer/build-rhel-8.json new file mode 100644 index 0000000000..2b59616bbc --- /dev/null +++ b/packer/build-rhel-8.json @@ -0,0 +1,145 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "build-rhel-8" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "t2.micro", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "ssh_username": "ec2-user", + "ssh_pty": true + }, + { + 
"type": "googlecompute", + "project_id": "{{user `googlecompute_project`}}", + "source_image": "{{user `source_image`}}", + "ssh_username": "packer", + "zone": "{{user `googlecompute_zone`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + } + } + ], + "provisioners": [ + { + "type": "shell", + "script": "linux/aws/wait-for-cloud-init.sh", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/update.sh" + ] + }, + { + "type": "file", + "source": "linux/files", + "destination": "/tmp" + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/setup-files.sh" + ] + }, + { + "type": "file", + "source": "linux/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "file", + "source": "rhel/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/disable-selinux.sh", + "rhel/install-jdk-11.sh", + "linux/install-cmake.sh", + "linux/install-geode.sh" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "inline": [ + "yum install -y git make gcc-c++ zlib-devel patch openssl-devel", + "yum install -y python3-pip", + 
"pip3 install cpp-coveralls", + "yum --enablerepo '*codeready*' install -y doxygen" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/aws/add-user-build.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} /bin/sudo -E -S bash '{{.Path}}'", + "scripts": [ + "rhel/cleanup.sh" + ] + } + ] +} diff --git a/packer/build-solaris-sparc.json b/packer/build-solaris-sparc.json deleted file mode 100644 index 0ce5eecc71..0000000000 --- a/packer/build-solaris-sparc.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "variables":{ - "image_name":"build-solaris-sparc", - "openstack_source_image":"", - "openstack_flavor":"Oracle Solaris non-global zone - tiny", - "vmware_source_image_name":"", - "gemfire_archive":"gemfire.tar.gz", - "pkg_oracle_com_certificate":"pkg.oracle.com.certificate.pem", - "pkg_oracle_com_key":"pkg.oracle.com.key.pem" - }, - "builders":[ - { - "type":"openstack", - "identity_endpoint":"{{user `openstack_identity_endpoint`}}", - "tenant_name":"{{user `openstack_tenant_name`}}", - "username":"{{user `openstack_username`}}", - "password":"{{user `openstack_password`}}", - "region":"{{user `openstack_region`}}", - "ssh_username":"root", - "image_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "source_image":"{{user `openstack_source_image`}}", - "flavor":"{{user `openstack_flavor`}}", - "insecure":"true" - } - ], - "provisioners":[ - { - "type":"file", - "source":"{{user `pkg_oracle_com_certificate`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.certificate.pem" - }, - { - "type":"file", - "source":"{{user `pkg_oracle_com_key`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.key.pem" - }, - { - "type":"shell", - "scripts":[ - "solaris/install-opencsw.sh", - "solaris/install-build-tools.sh", - "solaris/install-solarisstudio.sh", - "solaris/install-cmake.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - 
"destination":"gemfire.tar.gz" - }, - { - "type":"shell", - "scripts":[ - "solaris/install-gemfire.sh" - ] - }, - { - "type":"file", - "source":"solaris/changepasswd", - "destination":"changepasswd" - }, - { - "type":"shell", - "scripts":"solaris/add-user-build.sh" - }, - { - "type":"shell", - "scripts":[ - "solaris/update.sh", - "solaris/cleanup.sh" - ] - } - ] -} diff --git a/packer/build-solaris-x86.json b/packer/build-solaris-x86.json deleted file mode 100644 index 6a55c825af..0000000000 --- a/packer/build-solaris-x86.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "variables":{ - "image_name":"build-solaris-x86", - "openstack_source_image":"", - "openstack_flavor":"Oracle Solaris non-global zone - tiny", - "vmware_source_image_name":"", - "gemfire_archive":"gemfire.tar.gz", - "pkg_oracle_com_certificate":"pkg.oracle.com.certificate.pem", - "pkg_oracle_com_key":"pkg.oracle.com.key.pem" - }, - "builders":[ - { - "type":"openstack", - "identity_endpoint":"{{user `openstack_identity_endpoint`}}", - "tenant_name":"{{user `openstack_tenant_name`}}", - "username":"{{user `openstack_username`}}", - "password":"{{user `openstack_password`}}", - "region":"{{user `openstack_region`}}", - "ssh_username":"root", - "image_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "source_image":"{{user `openstack_source_image`}}", - "flavor":"{{user `openstack_flavor`}}", - "insecure":"true" - } - ], - "provisioners":[ - { - "type":"file", - "source":"{{user `pkg_oracle_com_certificate`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.certificate.pem" - }, - { - "type":"file", - "source":"{{user `pkg_oracle_com_key`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.key.pem" - }, - { - "type":"shell", - "scripts":[ - "solaris/install-opencsw.sh", - "solaris/install-build-tools.sh", - "solaris/install-solarisstudio.sh", - "solaris/install-cmake.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - 
"type":"shell", - "scripts":[ - "solaris/install-gemfire.sh" - ] - }, - { - "type":"file", - "source":"solaris/changepasswd", - "destination":"changepasswd" - }, - { - "type":"shell", - "scripts":"solaris/add-user-build.sh" - }, - { - "type":"shell", - "scripts":[ - "solaris/update.sh", - "solaris/cleanup.sh" - ] - } - ] -} diff --git a/packer/build-ubuntu-16.04.json b/packer/build-ubuntu-16.04.json new file mode 100644 index 0000000000..d87d539d34 --- /dev/null +++ b/packer/build-ubuntu-16.04.json @@ -0,0 +1,126 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "build-ubuntu-16-04" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "t2.micro", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "ssh_username": "ubuntu", + "ssh_pty": true + }, + { + "type": "googlecompute", + "project_id": "{{user `googlecompute_project`}}", + "source_image": "{{user `source_image`}}", + "ssh_username": "packer", + "zone": "{{user `googlecompute_zone`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + } + } + ], + "provisioners": [ + { + "type": "shell", + "script": "linux/aws/wait-for-cloud-init.sh", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + 
"execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/update.sh" + ] + }, + { + "type": "file", + "source": "linux/files", + "destination": "/tmp" + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/setup-files.sh" + ] + }, + { + "type": "file", + "source": "linux/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/install-cmake.sh", + "linux/install-geode.sh", + "ubuntu/install-clang-format.sh", + "ubuntu/install-jdk-11.sh" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/aws/add-user-build.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "inline": [ + "apt-get -y install build-essential libc++-dev libc++abi-dev zlib1g-dev libssl-dev wget doxygen graphviz python python-pip", + "pip install --upgrade pip", + "pip install cpp-coveralls" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/cleanup.sh" + ] + } + ] +} diff --git a/packer/build-ubuntu-18.04.json b/packer/build-ubuntu-18.04.json new file mode 100644 index 0000000000..b29bf3cd94 --- /dev/null +++ b/packer/build-ubuntu-18.04.json @@ -0,0 +1,126 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "build-ubuntu-18-04" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "t2.micro", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + 
"access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "ssh_username": "ubuntu", + "ssh_pty": true + }, + { + "type": "googlecompute", + "project_id": "{{user `googlecompute_project`}}", + "source_image": "{{user `source_image`}}", + "ssh_username": "packer", + "zone": "{{user `googlecompute_zone`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + } + } + ], + "provisioners": [ + { + "type": "shell", + "script": "linux/aws/wait-for-cloud-init.sh", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/update.sh" + ] + }, + { + "type": "file", + "source": "linux/files", + "destination": "/tmp" + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/setup-files.sh" + ] + }, + { + "type": "file", + "source": "linux/aws/files", + "destination": "/tmp", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/aws/setup-files.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "linux/install-cmake.sh", + "linux/install-geode.sh", + "ubuntu/install-clang-format.sh", + "ubuntu/install-jdk-11.sh" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} 
sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/aws/add-user-build.sh" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "inline": [ + "apt-get -y install build-essential libc++-dev libc++abi-dev zlib1g-dev libssl-dev wget doxygen graphviz python python-pip", + "pip install --upgrade pip", + "pip install cpp-coveralls" + ] + }, + { + "type": "shell", + "execute_command": "{{.Vars}} sudo -E -S bash '{{.Path}}'", + "scripts": [ + "ubuntu/cleanup.sh" + ] + } + ] +} diff --git a/packer/build-ubuntu.json b/packer/build-ubuntu.json deleted file mode 100644 index bf8605514a..0000000000 --- a/packer/build-ubuntu.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"ami-08718fb38f5f50f34", - "source_image_name":"X.vmx", - "image_name":"build-ubuntu" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ubuntu", - "ssh_pty":true - } - ], - "provisioners":[ - { - "type":"shell", - "script":"ubuntu/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "pause_before":"30s", - "scripts":[ - "ubuntu/update.sh" - ] - }, - { - "type":"file", - "source":"ubuntu/files", - "destination":"/tmp" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cp -rv /tmp/files/* /", - "rm -rf /tmp/files", - "chmod +x /etc/init-user.sh", - "chmod +x /usr/local/bin/update-hosts.sh", - "systemctl daemon-reload", - 
"systemctl enable update-hosts.service", - "systemctl start update-hosts.service" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "ubuntu/add-user-build.sh", - "ubuntu/install-packages.sh", - "ubuntu/install-coverage-tools.sh", - "ubuntu/install-cmake.sh", - "ubuntu/install-clang-format.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "ubuntu/install-gemfire.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "ubuntu/cleanup.sh" - ] - } - ] -} diff --git a/packer/default.json b/packer/default.json index 42666d00fe..d02af97e3a 100644 --- a/packer/default.json +++ b/packer/default.json @@ -1,5 +1,7 @@ { - "product_name":"native", - "region":"us-west-2", - "openstack_region":"RegionOne" + "product_name": "native", + "region": "us-west-2", + "aws_region": "us-west-2", + "googlecompute_project": "gemfire-dev", + "googlecompute_zone": "us-central1-c" } diff --git a/packer/dev-solaris-sparc.json b/packer/dev-solaris-sparc.json deleted file mode 100644 index 180e8e39c7..0000000000 --- a/packer/dev-solaris-sparc.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "variables":{ - "image_name":"dev-solaris-sparc", - "openstack_source_image":"", - "openstack_flavor":"Oracle Solaris non-global zone - tiny", - "vmware_source_image_name":"" - }, - "builders":[ - { - "type":"openstack", - "identity_endpoint":"{{user `openstack_identity_endpoint`}}", - "tenant_name":"{{user `openstack_tenant_name`}}", - "username":"{{user `openstack_username`}}", - "password":"{{user `openstack_password`}}", - "region":"{{user `openstack_region`}}", - "ssh_username":"root", - "image_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "source_image":"{{user `openstack_source_image`}}", - "flavor":"{{user 
`openstack_flavor`}}", - "insecure":"true" - } - ], - "provisioners":[ - { - "type":"shell", - "inline":[ - "/opt/csw/bin/pkgutil -i -y ccache" - ] - }, - { - "type":"shell", - "scripts":[ - "solaris/update.sh", - "solaris/cleanup.sh" - ] - } - ] -} diff --git a/packer/dev-solaris-x86.json b/packer/dev-solaris-x86.json deleted file mode 100644 index 783945e9bc..0000000000 --- a/packer/dev-solaris-x86.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "variables":{ - "image_name":"dev-solaris-x86", - "openstack_source_image":"", - "openstack_flavor":"Oracle Solaris non-global zone - tiny", - "vmware_source_image_name":"" - }, - "builders":[ - { - "type":"openstack", - "identity_endpoint":"{{user `openstack_identity_endpoint`}}", - "tenant_name":"{{user `openstack_tenant_name`}}", - "username":"{{user `openstack_username`}}", - "password":"{{user `openstack_password`}}", - "region":"{{user `openstack_region`}}", - "ssh_username":"root", - "image_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "source_image":"{{user `openstack_source_image`}}", - "flavor":"{{user `openstack_flavor`}}", - "insecure":"true" - } - ], - "provisioners":[ - { - "type":"shell", - "inline":[ - "/opt/csw/bin/pkgutil -i -y ccache" - ] - }, - { - "type":"shell", - "scripts":[ - "solaris/update.sh", - "solaris/cleanup.sh" - ] - } - ] -} diff --git a/packer/dev-windows-2012-r2.json b/packer/dev-windows-2012-r2.json deleted file mode 100644 index bd3f539309..0000000000 --- a/packer/dev-windows-2012-r2.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"X.vmx", - "image_name":"dev-windows-2016" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - 
"subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "communicator":"ssh", - "ssh_username":"Administrator", - "ssh_handshake_attempts":100, - "ssh_timeout":"15m", - "launch_block_device_mappings":[ - { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_size":100 - } - ] - } - ], - "provisioners":[ - { - "pause_before":"30s", - "type":"powershell", - "inline":[ - "choco install notepadplusplus -confirm", - "choco install googlechrome -confirm", - "choco install sysinternals -confirm", - "choco install windbg -confirm" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/install-dependencies-app-from-github.ps1" - ] - }, - { - "type":"powershell", - "inline":[ - "Remove-Item C:\\Users\\build\\.ssh -Recurse -Force -ErrorAction SilentlyContinue" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/cleanup.ps1" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/setup-ec2config.ps1" - ], - "only":[ - "amazon-ebs" - ] - } - ] -} diff --git a/packer/rhel/files/etc/init-user.sh b/packer/linux/aws/files/usr/local/bin/init-user.sh similarity index 100% rename from packer/rhel/files/etc/init-user.sh rename to packer/linux/aws/files/usr/local/bin/init-user.sh diff --git a/packer/rhel/install-gemfire.sh b/packer/linux/aws/setup-files.sh similarity index 91% rename from packer/rhel/install-gemfire.sh rename to packer/linux/aws/setup-files.sh index 64382af3fe..8087c3962c 100644 --- a/packer/rhel/install-gemfire.sh +++ b/packer/linux/aws/setup-files.sh @@ -17,6 +17,7 @@ set -x -e -o pipefail -mkdir /gemfire -tar -zxf gemfire.tar.gz -C /gemfire -rm gemfire.tar.gz +cp -rv /tmp/files/* / +rm -rf /tmp/files + +chmod +x /usr/local/bin/init-user.sh diff --git a/packer/rhel/wait-for-cloud-init.sh b/packer/linux/aws/wait-for-cloud-init.sh similarity index 100% rename from packer/rhel/wait-for-cloud-init.sh rename to 
packer/linux/aws/wait-for-cloud-init.sh diff --git a/packer/solaris/add-user-build.sh b/packer/linux/files/etc/systemd/system/update-hosts.service similarity index 76% rename from packer/solaris/add-user-build.sh rename to packer/linux/files/etc/systemd/system/update-hosts.service index 2ce3509983..cd954cee21 100644 --- a/packer/solaris/add-user-build.sh +++ b/packer/linux/files/etc/systemd/system/update-hosts.service @@ -1,5 +1,3 @@ -#!/usr/bin/env bash - # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. @@ -15,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -x -e -o pipefail - -useradd -m -s /usr/bin/bash build +[Unit] +Description=Ensure an A record exists for local hostname in /etc/hosts +After=network.target +After=network-online.target +Wants=network-online.target -if (! which expect > /dev/null); then - pkg install -v --accept shell/expect -fi +[Service] +ExecStart=/usr/local/bin/update-hosts.sh -chmod +x ./changepasswd -./changepasswd build p1votal! \ No newline at end of file +[Install] +WantedBy=multi-user.target diff --git a/packer/solaris/install-gemfire.sh b/packer/linux/files/usr/local/bin/update-hosts.sh old mode 100755 new mode 100644 similarity index 84% rename from packer/solaris/install-gemfire.sh rename to packer/linux/files/usr/local/bin/update-hosts.sh index a9d18b42c7..43921dee34 --- a/packer/solaris/install-gemfire.sh +++ b/packer/linux/files/usr/local/bin/update-hosts.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/bash # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with @@ -15,8 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-set -x -e -o pipefail - -mkdir /gemfire -gtar -xzvf gemfire.tar.gz -C /gemfire -rm gemfire.tar.gz +# add hostname to /etc/hosts if not set +if (! getent hosts `hostname` >/dev/null); then + echo `hostname -I` `hostname` >> /etc/hosts +fi diff --git a/packer/rhel/install-cmake.sh b/packer/linux/install-cmake.sh similarity index 87% rename from packer/rhel/install-cmake.sh rename to packer/linux/install-cmake.sh index 71afe7ebd0..44e6248f74 100644 --- a/packer/rhel/install-cmake.sh +++ b/packer/linux/install-cmake.sh @@ -17,9 +17,11 @@ set -x -e -o pipefail +CMAKE_VERSION=3.16.8 + tmp=`mktemp` -curl -o ${tmp} -v -L https://cmake.org/files/v3.16/cmake-3.16.8-Linux-x86_64.sh +curl -o ${tmp} -L https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh bash ${tmp} --skip-license --prefix=/usr/local diff --git a/packer/rhel/install-jdk-1.8.sh b/packer/linux/install-geode.sh similarity index 75% rename from packer/rhel/install-jdk-1.8.sh rename to packer/linux/install-geode.sh index 016ea05303..6447450644 100644 --- a/packer/rhel/install-jdk-1.8.sh +++ b/packer/linux/install-geode.sh @@ -17,11 +17,10 @@ set -x -e -o pipefail -tmp=`mktemp -d` +GEODE_VERSION=1.12.0 -curl -o ${tmp}/jdk.rpm -v -j -k -L -H "Cookie: oraclelicense=accept-securebackup-cookie" \ - http://download.oracle.com/otn-pub/java/jdk/8u131-b11/d54c1d3a095b4ff2b6607d096fa80163/jdk-8u131-linux-x64.rpm +cd /usr/local +curl -L "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | \ + tar xzf - -yum install -y ${tmp}/jdk.rpm - -rm -rf ${tmp} +echo export GEODE_HOME=/usr/local/apache-geode-${GEODE_VERSION} > /etc/profile.d/geode.sh diff --git a/packer/ubuntu/install-cmake.sh b/packer/linux/setup-files.sh similarity index 82% rename from packer/ubuntu/install-cmake.sh rename to packer/linux/setup-files.sh index 71afe7ebd0..4f23ff7ad1 100644 --- a/packer/ubuntu/install-cmake.sh +++ b/packer/linux/setup-files.sh @@ -17,10 
+17,11 @@ set -x -e -o pipefail -tmp=`mktemp` +cp -rv /tmp/files/* / +rm -rf /tmp/files -curl -o ${tmp} -v -L https://cmake.org/files/v3.16/cmake-3.16.8-Linux-x86_64.sh +chmod +x /usr/local/bin/update-hosts.sh -bash ${tmp} --skip-license --prefix=/usr/local - -rm -f ${tmp} +systemctl daemon-reload +systemctl enable update-hosts.service +systemctl start update-hosts.service diff --git a/packer/rhel-7-base.json b/packer/rhel-7-base.json deleted file mode 100644 index 84d43f8029..0000000000 --- a/packer/rhel-7-base.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"ami-775e4f16", - "source_image_name":"X.vmx", - "image_name":"rhel-7-base" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ec2-user", - "ssh_pty":true - } - ], - "provisioners":[ - { - "type":"shell", - "script":"rhel/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/update.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/cleanup.sh" - ] - } - ] -} \ No newline at end of file diff --git a/packer/ubuntu/install-coverage-tools.sh b/packer/rhel/7/install-devtoolset.sh similarity index 85% rename from packer/ubuntu/install-coverage-tools.sh rename to packer/rhel/7/install-devtoolset.sh index 8d268af25d..999df2a291 100644 --- a/packer/ubuntu/install-coverage-tools.sh +++ b/packer/rhel/7/install-devtoolset.sh @@ -17,8 +17,6 @@ set -x -e -o pipefail -# for 
coveralls.io -pip install cpp-coveralls +yum --enablerepo '*rhscl*' install -y devtoolset-4-gcc-c++ -# For codecov.ip -apt-get -y install lcov \ No newline at end of file +echo "source scl_source enable devtoolset-4" > /etc/profile.d/devtoolset.sh diff --git a/packer/rhel/add-user-build.sh b/packer/rhel/aws/add-user-build.sh similarity index 100% rename from packer/rhel/add-user-build.sh rename to packer/rhel/aws/add-user-build.sh diff --git a/packer/rhel/files/etc/cloud/cloud.cfg.d/10_hosts.cfg b/packer/rhel/aws/files/etc/cloud/cloud.cfg.d/10_hosts.cfg similarity index 100% rename from packer/rhel/files/etc/cloud/cloud.cfg.d/10_hosts.cfg rename to packer/rhel/aws/files/etc/cloud/cloud.cfg.d/10_hosts.cfg diff --git a/packer/solaris/cleanup.sh b/packer/rhel/aws/setup-files.sh old mode 100755 new mode 100644 similarity index 92% rename from packer/solaris/cleanup.sh rename to packer/rhel/aws/setup-files.sh index 121335619d..3a0d930d00 --- a/packer/solaris/cleanup.sh +++ b/packer/rhel/aws/setup-files.sh @@ -15,5 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +set -x -e -o pipefail -exit 0 \ No newline at end of file +cp -rv /tmp/files/* / +rm -rf /tmp/files diff --git a/packer/rhel/install-coverage-tools.sh b/packer/rhel/install-coverage-tools.sh deleted file mode 100644 index b696e33d5f..0000000000 --- a/packer/rhel/install-coverage-tools.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -x -e -o pipefail - -# for coveralls.io -scl enable python27 'pip install cpp-coveralls' - -# For codecov.ip -yum install -y lcov \ No newline at end of file diff --git a/packer/rhel/install-repos.sh b/packer/rhel/install-jdk-11.sh similarity index 77% rename from packer/rhel/install-repos.sh rename to packer/rhel/install-jdk-11.sh index c4e62732c3..6cffe2e8c6 100644 --- a/packer/rhel/install-repos.sh +++ b/packer/rhel/install-jdk-11.sh @@ -17,6 +17,14 @@ set -x -e -o pipefail -yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm -yum install -y https://rhel7.iuscommunity.org/ius-release.rpm +cat <<'EOF' > /etc/yum.repos.d/bellsoft.repo +[BellSoft] +name=BellSoft Repository +baseurl=https://yum.bell-sw.com +enabled=1 +gpgcheck=1 +gpgkey=https://download.bell-sw.com/pki/GPG-KEY-bellsoft +priority=1 +EOF +yum install -y bellsoft-java11 diff --git a/packer/rhel/install-scl-devtoolset.sh b/packer/rhel/install-scl-devtoolset.sh deleted file mode 100644 index f165e44c1b..0000000000 --- a/packer/rhel/install-scl-devtoolset.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -x -e -o pipefail - -yum-config-manager --enable rhui-REGION-rhel-server-rhscl -yum -y install devtoolset-4-gcc-c++ - -echo "source scl_source enable devtoolset-4" >> ~build/.bashrc diff --git a/packer/rhel/install-scl-python27.sh b/packer/rhel/install-scl-python27.sh deleted file mode 100644 index 666a3e9b5d..0000000000 --- a/packer/rhel/install-scl-python27.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -yum-config-manager --enable rhui-REGION-rhel-server-rhscl -yum -y install python27-python-pip -scl enable python27 'pip install --upgrade pip' - -echo "source scl_source enable python27" >> ~build/.bashrc diff --git a/packer/solaris/changepasswd b/packer/solaris/changepasswd deleted file mode 100755 index caae9c04b8..0000000000 --- a/packer/solaris/changepasswd +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env expect -f -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set force_conservative 0 ;# set to 1 to force conservative mode even if - ;# script wasn't run conservatively originally -if {$force_conservative} { - set send_slow {1 .1} - proc send {ignore arg} { - sleep .1 - exp_send -s -- $arg - } -} - -set username [lindex $argv 0] -set password [lindex $argv 1] - -set timeout -1 -spawn passwd $username -match_max 100000 -expect "New Password: " -send -- "$password\r" -expect "Re-enter new Password: " -send -- "$password\r" -expect eof diff --git a/packer/solaris/install-build-tools.sh b/packer/solaris/install-build-tools.sh deleted file mode 100755 index 11ab841f76..0000000000 --- a/packer/solaris/install-build-tools.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -pkg change-facet \ - facet.version-lock.consolidation/java-8/java-8-incorporation=false - -# Install required tools -pkg install -v --accept \ - system/header \ - developer/assembler \ - developer/java/jdk-8 \ - developer/build/gnu-make \ - archiver/gnu-tar \ - text/gnu-patch - -# too many conflicts with system libraries, use opencsw -/opt/csw/bin/pkgutil -U -/opt/csw/bin/pkgutil -i -y \ - git \ - doxygen diff --git a/packer/solaris/install-cmake.sh b/packer/solaris/install-cmake.sh deleted file mode 100644 index b13b7b4049..0000000000 --- a/packer/solaris/install-cmake.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -source ~/.bashrc - -NCPU=2 - -pushd `mktemp -d` -wget -O - https://cmake.org/files/v3.16/cmake-3.16.8.tar.gz | \ - gtar --strip-components=1 -zxf - -./bootstrap --system-curl --no-qt-gui --parallel=$NCPU -- -DBUILD_CursesDialog=off -gmake -j$NCPU -gmake install -popd - -p='PATH=$PATH:/usr/local/bin; export PATH' -echo "$p" >> ~/.profile -echo "$p" >> ~/.bashrc -echo "$p" >> /etc/skel/.profile -echo "$p" >> /etc/skel/.bashrc diff --git a/packer/solaris/install-opencsw.sh b/packer/solaris/install-opencsw.sh deleted file mode 100644 index 010950ef9b..0000000000 --- a/packer/solaris/install-opencsw.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -e - -yes | pkgadd -d http://get.opencsw.org/now all -/opt/csw/bin/pkgutil -U - -p='PATH=$PATH:/opt/csw/bin; export PATH' -echo "$p" >> ~/.profile -echo "$p" >> ~/.bashrc -echo "$p" >> /etc/skel/.profile -echo "$p" >> /etc/skel/.bashrc diff --git a/packer/solaris/install-solarisstudio.sh b/packer/solaris/install-solarisstudio.sh deleted file mode 100644 index faa6b0ce49..0000000000 --- a/packer/solaris/install-solarisstudio.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -e - -pkg change-facet facet.version-lock.consolidation/sunpro/sunpro-incorporation=false -pkg update sunpro-incorporation || true - -pkg set-publisher \ - -k /var/pkg/ssl/pkg.oracle.com.key.pem \ - -c /var/pkg/ssl/pkg.oracle.com.certificate.pem \ - -G '*' -g https://pkg.oracle.com/solarisstudio/release solarisstudio - -pkg install --accept -v developerstudio-126/c++ developerstudio-126/dbx - -p='PATH=$PATH:/opt/developerstudio12.6/bin; export PATH' -echo "$p" >> ~/.profile -echo "$p" >> ~/.bashrc -echo "$p" >> /etc/skel/.profile -echo "$p" >> /etc/skel/.bashrc diff --git a/packer/solaris/install-test-tools.sh b/packer/solaris/install-test-tools.sh deleted file mode 100755 index 2cbeb9db78..0000000000 --- a/packer/solaris/install-test-tools.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -pkg change-facet \ - facet.version-lock.consolidation/java-8/java-8-incorporation=false - -# Install required tools -pkg install -v --accept \ - developer/java/jdk-8 \ - archiver/gnu-tar diff --git a/packer/solaris/update.sh b/packer/solaris/update.sh deleted file mode 100755 index 3181ea5b65..0000000000 --- a/packer/solaris/update.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -pkg update -v --no-backup-be --accept - -# exits 4 on no update -exit 0 diff --git a/packer/test-rhel-7.json b/packer/test-rhel-7.json deleted file mode 100644 index 2b16e5ddde..0000000000 --- a/packer/test-rhel-7.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"X.vmx", - "image_name":"test-rhel-7" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ec2-user", - "ssh_pty":true - } - ], - "provisioners":[ - { - "type":"shell", - "script":"rhel/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/update.sh" - ] - }, - { - "type":"file", - "source":"rhel/files", - "destination":"/tmp" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cp -rv /tmp/files/* /", - "rm -rf /tmp/files", - "chmod +x /etc/init-user.sh" - ] - }, - { - "type":"file", - "source":"rhel/init-hosts.rc.local", - "destination":"/tmp/init-hosts.rc.local" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "inline":[ - "cat /tmp/init-hosts.rc.local >> /etc/rc.local" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/disable-selinux.sh", - "rhel/add-user-build.sh", - "rhel/install-repos.sh", - "rhel/install-jdk-1.8.sh", - "rhel/install-cmake.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" 
- }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/install-gemfire.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "rhel/cleanup.sh" - ] - } - ] -} diff --git a/packer/test-solaris-x86.json b/packer/test-solaris-x86.json deleted file mode 100644 index 6657b676a4..0000000000 --- a/packer/test-solaris-x86.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "variables":{ - "image_name":"test-solaris-x86", - "openstack_source_image":"c0df7ff9-fc8f-4220-ac1f-fd24924dfe7a", - "vmware_source_image_name":"X.vmx", - "gemfire_archive":"gemfire.tar.gz", - "pkg_oracle_com_certificate":"pkg.oracle.com.certificate.pem", - "pkg_oracle_com_key":"pkg.oracle.com.key.pem" - }, - "builders":[ - { - "type":"openstack", - "identity_endpoint":"{{user `openstack_identity_endpoint`}}", - "tenant_name":"{{user `openstack_tenant_name`}}", - "username":"{{user `openstack_username`}}", - "password":"{{user `openstack_password`}}", - "region":"{{user `openstack_region`}}", - "ssh_username":"root", - "image_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "source_image":"{{user `openstack_source_image`}}", - "flavor":"Oracle Solaris non-global zone - tiny", - "insecure":"true" - } - ], - "provisioners":[ - { - "type":"file", - "source":"{{user `pkg_oracle_com_certificate`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.certificate.pem" - }, - { - "type":"file", - "source":"{{user `pkg_oracle_com_key`}}", - "destination":"/var/pkg/ssl/pkg.oracle.com.key.pem" - }, - { - "type":"shell", - "scripts":[ - "solaris/install-opencsw.sh", - "solaris/install-test-tools.sh" - ] - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"shell", - "scripts":[ - "solaris/install-gemfire.sh" - ] - }, - { - "type":"file", - "source":"solaris/changepasswd", - "destination":"changepasswd" - }, - { - "type":"shell", - 
"scripts":"solaris/add-user-build.sh" - }, - { - "type":"shell", - "scripts":[ - "solaris/update.sh", - "solaris/cleanup.sh" - ] - } - ] -} diff --git a/packer/test-windows-2012-r2.json b/packer/test-windows-2012-r2.json deleted file mode 100644 index 953719d472..0000000000 --- a/packer/test-windows-2012-r2.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"X.vmx", - "image_name":"test-windows-2016" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "communicator":"winrm", - "winrm_username":"Administrator", - "launch_block_device_mappings":[ - { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_size":40 - } - ] - } - ], - "provisioners":[ - { - "pause_before":"30s", - "type":"powershell", - "inline":[ - "choco install adoptopenjdk8 -confirm", - "choco install cmake.portable -confirm" - ] - }, - { - "type":"file", - "source":"windows/Packer.psm1", - "destination":"Documents/WindowsPowerShell/Modules/Packer/Packer.psm1" - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"powershell", - "scripts":[ - "windows/install-gemfire.ps1" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/add-user-build.ps1" - ] - }, - { - "pause_before":"30s", - "type":"file", - "source":"windows/init-user-build.ps1", - "destination":"C:/Users/build/init-user-build.ps1" - }, - { - "type":"powershell", - "scripts":[ - "windows/cleanup.ps1" - ] - }, - { - "type":"powershell", - "scripts":[ - 
"windows/setup-ec2config.ps1" - ], - "only":[ - "amazon-ebs" - ] - } - ] -} diff --git a/packer/test-windows-2012.json b/packer/test-windows-2012.json deleted file mode 100644 index 953719d472..0000000000 --- a/packer/test-windows-2012.json +++ /dev/null @@ -1,88 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"X.vmx", - "image_name":"test-windows-2016" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "communicator":"winrm", - "winrm_username":"Administrator", - "launch_block_device_mappings":[ - { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_size":40 - } - ] - } - ], - "provisioners":[ - { - "pause_before":"30s", - "type":"powershell", - "inline":[ - "choco install adoptopenjdk8 -confirm", - "choco install cmake.portable -confirm" - ] - }, - { - "type":"file", - "source":"windows/Packer.psm1", - "destination":"Documents/WindowsPowerShell/Modules/Packer/Packer.psm1" - }, - { - "type":"file", - "source":"{{user `gemfire_archive`}}", - "destination":"gemfire.tar.gz" - }, - { - "type":"powershell", - "scripts":[ - "windows/install-gemfire.ps1" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/add-user-build.ps1" - ] - }, - { - "pause_before":"30s", - "type":"file", - "source":"windows/init-user-build.ps1", - "destination":"C:/Users/build/init-user-build.ps1" - }, - { - "type":"powershell", - "scripts":[ - "windows/cleanup.ps1" - ] - }, - { - "type":"powershell", - "scripts":[ - "windows/setup-ec2config.ps1" - ], - "only":[ - "amazon-ebs" - ] - } - ] 
-} diff --git a/packer/ubuntu-16.04-base.json b/packer/ubuntu-16.04-base.json deleted file mode 100644 index abdb06401a..0000000000 --- a/packer/ubuntu-16.04-base.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"ami-0afae182eed9d2b46", - "source_image_name":"X.vmx", - "image_name":"ubuntu-16.04-base" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.micro", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "ssh_username":"ubuntu", - "ssh_pty":true - } - ], - "provisioners":[ - { - "type":"shell", - "script":"ubuntu/wait-for-cloud-init.sh" - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "ubuntu/update.sh" - ] - }, - { - "type":"shell", - "execute_command":"{{.Vars}} sudo -E -S bash '{{.Path}}'", - "scripts":[ - "ubuntu/cleanup.sh" - ] - } - ] -} \ No newline at end of file diff --git a/packer/ubuntu/add-user-build.sh b/packer/ubuntu/aws/add-user-build.sh similarity index 100% rename from packer/ubuntu/add-user-build.sh rename to packer/ubuntu/aws/add-user-build.sh diff --git a/packer/ubuntu/cleanup.sh b/packer/ubuntu/cleanup.sh index 1398249d9f..77cf3d5fc5 100644 --- a/packer/ubuntu/cleanup.sh +++ b/packer/ubuntu/cleanup.sh @@ -18,4 +18,3 @@ set -x -e -o pipefail apt-get autoclean - diff --git a/packer/ubuntu/install-clang-format.sh b/packer/ubuntu/install-clang-format.sh index 03a1ce104b..c2bbeecdb2 100644 --- a/packer/ubuntu/install-clang-format.sh +++ b/packer/ubuntu/install-clang-format.sh @@ -17,6 +17,12 @@ set -x -e -o pipefail +. 
/etc/os-release + +wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - +apt-add-repository "deb http://apt.llvm.org/${VERSION_CODENAME}/ llvm-toolchain-${VERSION_CODENAME}-6.0 main" + +apt-get update apt-get -y install clang-format-6.0 -ln -s /usr/bin/clang-format-6.0 /usr/bin/clang-format \ No newline at end of file +ln -s /usr/bin/clang-format-6.0 /usr/bin/clang-format diff --git a/packer/ubuntu/install-gemfire.sh b/packer/ubuntu/install-gemfire.sh deleted file mode 100644 index 64382af3fe..0000000000 --- a/packer/ubuntu/install-gemfire.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -mkdir /gemfire -tar -zxf gemfire.tar.gz -C /gemfire -rm gemfire.tar.gz diff --git a/packer/rhel/install-build-rpms.sh b/packer/ubuntu/install-jdk-11.sh similarity index 76% rename from packer/rhel/install-build-rpms.sh rename to packer/ubuntu/install-jdk-11.sh index 2809a1f7fe..4b758ef791 100644 --- a/packer/rhel/install-build-rpms.sh +++ b/packer/ubuntu/install-jdk-11.sh @@ -17,7 +17,8 @@ set -x -e -o pipefail -yum install -y make doxygen zlib-devel patch openssl-devel +wget -q -O - https://download.bell-sw.com/pki/GPG-KEY-bellsoft | sudo apt-key add - +echo "deb [arch=amd64] https://apt.bell-sw.com/ stable main" | sudo tee /etc/apt/sources.list.d/bellsoft.list -yum install -y rh-git29 -echo "source scl_source enable rh-git29" >> ~build/.bashrc +sudo apt-get update +sudo apt-get install -y bellsoft-java11 diff --git a/packer/ubuntu/install-packages.sh b/packer/ubuntu/install-packages.sh deleted file mode 100644 index f3e803f51b..0000000000 --- a/packer/ubuntu/install-packages.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -x -e -o pipefail - -apt-get -y install \ - build-essential \ - libc++-dev \ - libc++abi-dev \ - zlib1g-dev \ - libssl-dev \ - wget \ - doxygen \ - graphviz \ - openjdk-8-jdk \ - python \ - python-pip - -pip install --upgrade pip diff --git a/packer/ubuntu/wait-for-cloud-init.sh b/packer/ubuntu/wait-for-cloud-init.sh deleted file mode 100644 index 86b394c308..0000000000 --- a/packer/ubuntu/wait-for-cloud-init.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -#set -x -set -e -set -o pipefail - -# leaves tail running but we should be restarting anyway -{ tail -n +1 -f /var/log/cloud-init.log /var/log/cloud-init-output.log & } | sed \ - -e '/Cloud-init .* finished/q' \ - -e '/Failed at merging in cloud config/q1' From e26929d58d568eb2187903d68cde845a16cda6f4 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 6 Jun 2020 08:24:52 -0700 Subject: [PATCH 002/155] Added Google Compute support. Reorganize and cleanup Windows. 
--- packer/build-rhel-7.json | 7 +- packer/build-rhel-8.json | 7 +- packer/build-ubuntu-16.04.json | 7 +- packer/build-ubuntu-18.04.json | 7 +- packer/build-windows-vs-2017.json | 105 ++++++++ packer/build-windows.json | 101 -------- packer/windows-2016-base.json | 128 ++++++---- packer/windows-2016-vs-2017.json | 97 +++++--- packer/windows/Packer.psm1 | 233 ------------------ packer/windows/add-user-build.ps1 | 29 --- ...install-activeperl.ps1 => disable-uac.ps1} | 12 +- packer/windows/enable-winrm.ps1 | 36 +++ packer/windows/init-user-build.ps1 | 38 --- packer/windows/install-chocolatey.ps1 | 10 +- .../install-dependencies-app-from-github.ps1 | 37 --- packer/windows/install-dependencies.ps1 | 25 -- packer/windows/install-doxygen.ps1 | 26 -- packer/windows/install-gemfire.ps1 | 21 -- ...{install-openssl.ps1 => install-geode.ps1} | 16 +- packer/windows/install-ssh.ps1 | 4 +- packer/windows/install-vs-2017-community.ps1 | 13 +- packer/windows/setup-ec2config.ps1 | 33 --- packer/windows/uninstall-doxygen.ps1 | 23 -- packer/windows/vs-2015-admin.xml | 100 -------- packer/windows/winrm.cloud-init | 26 -- 25 files changed, 334 insertions(+), 807 deletions(-) create mode 100644 packer/build-windows-vs-2017.json delete mode 100644 packer/build-windows.json delete mode 100644 packer/windows/Packer.psm1 delete mode 100644 packer/windows/add-user-build.ps1 rename packer/windows/{install-activeperl.ps1 => disable-uac.ps1} (69%) create mode 100644 packer/windows/enable-winrm.ps1 delete mode 100644 packer/windows/init-user-build.ps1 delete mode 100644 packer/windows/install-dependencies-app-from-github.ps1 delete mode 100644 packer/windows/install-dependencies.ps1 delete mode 100644 packer/windows/install-doxygen.ps1 delete mode 100644 packer/windows/install-gemfire.ps1 rename packer/windows/{install-openssl.ps1 => install-geode.ps1} (59%) delete mode 100644 packer/windows/setup-ec2config.ps1 delete mode 100644 packer/windows/uninstall-doxygen.ps1 delete mode 100644 
packer/windows/vs-2015-admin.xml delete mode 100644 packer/windows/winrm.cloud-init diff --git a/packer/build-rhel-7.json b/packer/build-rhel-7.json index 8efeaaefec..f5726e22c2 100644 --- a/packer/build-rhel-7.json +++ b/packer/build-rhel-7.json @@ -28,17 +28,18 @@ }, { "type": "googlecompute", + "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", - "source_image": "{{user `source_image`}}", - "ssh_username": "packer", "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", "version": "{{user `version`}}", "source_ami": "{{user `source_image`}}" - } + }, + "ssh_username": "packer" } ], "provisioners": [ diff --git a/packer/build-rhel-8.json b/packer/build-rhel-8.json index 2b59616bbc..b241987e16 100644 --- a/packer/build-rhel-8.json +++ b/packer/build-rhel-8.json @@ -28,17 +28,18 @@ }, { "type": "googlecompute", + "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", - "source_image": "{{user `source_image`}}", - "ssh_username": "packer", "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", "version": "{{user `version`}}", "source_ami": "{{user `source_image`}}" - } + }, + "ssh_username": "packer" } ], "provisioners": [ diff --git a/packer/build-ubuntu-16.04.json b/packer/build-ubuntu-16.04.json index d87d539d34..77e51357f7 100644 --- a/packer/build-ubuntu-16.04.json +++ b/packer/build-ubuntu-16.04.json @@ -28,17 +28,18 @@ }, { "type": "googlecompute", + "machine_type": 
"n1-standard-1", "project_id": "{{user `googlecompute_project`}}", - "source_image": "{{user `source_image`}}", - "ssh_username": "packer", "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", "version": "{{user `version`}}", "source_ami": "{{user `source_image`}}" - } + }, + "ssh_username": "packer" } ], "provisioners": [ diff --git a/packer/build-ubuntu-18.04.json b/packer/build-ubuntu-18.04.json index b29bf3cd94..7c10e060ee 100644 --- a/packer/build-ubuntu-18.04.json +++ b/packer/build-ubuntu-18.04.json @@ -28,17 +28,18 @@ }, { "type": "googlecompute", + "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", - "source_image": "{{user `source_image`}}", - "ssh_username": "packer", "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", "version": "{{user `version`}}", "source_ami": "{{user `source_image`}}" - } + }, + "ssh_username": "packer" } ], "provisioners": [ diff --git a/packer/build-windows-vs-2017.json b/packer/build-windows-vs-2017.json new file mode 100644 index 0000000000..c36e27f387 --- /dev/null +++ b/packer/build-windows-vs-2017.json @@ -0,0 +1,105 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "build-windows-vs-2017" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "c5d.2xlarge", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user 
`image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_ami`}}" + }, + "communicator": "ssh", + "ssh_username": "Administrator", + "ssh_handshake_attempts": 100, + "ssh_timeout": "15m", + "launch_block_device_mappings": [ + { + "device_name": "/dev/sda1", + "delete_on_termination": true, + "volume_size": 100 + } + ] + }, + { + "type": "googlecompute", + "machine_type": "n1-standard-2", + "project_id": "{{user `googlecompute_project`}}", + "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "disk_size": "100", + "communicator": "winrm", + "winrm_username": "packer_user", + "winrm_insecure": true, + "winrm_use_ssl": true + } + ], + "provisioners": [ + { + "type": "powershell", + "inline": [ + "choco install liberica11jdk -confirm", + "choco install cmake.portable -confirm", + "choco install git.install -confirm", + "choco install dogtail.dotnet3.5sp1 -confirm", + "choco install nunit.install --version 2.6.4 -confirm", + "choco install netfx-4.5.2-devpack --allowEmptyChecksums -confirm", + "choco install nuget.commandline -confirm", + "choco install doxygen.install -confirm", + "choco install openssl -confirm", + "choco install strawberryperl -confirm" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-geode.ps1" + ] + }, + { + "type": "powershell", + "scripts": [ 
+ "windows/setup-ec2launch.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "powershell", + "inline": [ + "GCESysprep -NoShutdown" + ], + "only": [ + "googlecompute" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" + ] + } + ] +} diff --git a/packer/build-windows.json b/packer/build-windows.json deleted file mode 100644 index 46723afbec..0000000000 --- a/packer/build-windows.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "variables":{ - "region":"us-west-2", - "source_ami":"ami-4e6ea82e", - "source_image_name":"X.vmx", - "image_name":"build-windows", - "gemfire_archive": "gemfire.zip" - }, - "builders":[ - { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" - }, - "communicator":"ssh", - "ssh_username":"Administrator", - "ssh_handshake_attempts":100, - "ssh_timeout":"15m", - "launch_block_device_mappings":[ - { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_size":100 - } - ] - } - ], - "provisioners": [ - { - "pause_before":"60s", - "type": "powershell", - "scripts": [ - "windows/install-dependencies.ps1" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/install-activeperl.ps1" - ] - }, - { - "type": "file", - "source": "{{user `gemfire_archive`}}", - "destination": "gemfire.tar.gz" - }, - { - "type": "powershell", - "scripts": [ - "windows/install-gemfire.ps1" - ] - }, - { - "type":"file", - "source":"windows/Packer.psm1", - "destination":"Documents/WindowsPowerShell/Modules/Packer/Packer.psm1" - }, - { - "type": "powershell", - "scripts": [ - "windows/install-openssl.ps1" - ] - }, - { - 
"type": "powershell", - "scripts": [ - "windows/add-user-build.ps1" - ] - }, - { - "type": "file", - "source": "windows/init-user-build.ps1", - "destination": "C:/Users/build/init-user-build.ps1" - }, - { - "type": "powershell", - "scripts": [ - "windows/cleanup.ps1" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/setup-ec2launch.ps1" - ], - "only": [ - "amazon-ebs" - ] - } - ] -} diff --git a/packer/windows-2016-base.json b/packer/windows-2016-base.json index 62492e2890..52494f0137 100644 --- a/packer/windows-2016-base.json +++ b/packer/windows-2016-base.json @@ -1,74 +1,104 @@ { - "variables":{ - "region":"us-west-2", - "source_ami":"", - "source_image_name":"X.vmx", - "image_name":"windows-2016-base" + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "windows-2016-base" }, - "builders":[ + "builders": [ { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" + "type": "amazon-ebs", + "instance_type": "c5d.2xlarge", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_ami`}}" }, - "communicator":"winrm", - "winrm_username":"Administrator", - 
"user_data_file":"windows/winrm.cloud-init" + "user_data_file": "windows/enable-winrm.ps1", + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true + }, + { + "type": "googlecompute", + "machine_type": "n1-standard-1", + "project_id": "{{user `googlecompute_project`}}", + "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "disk_size": "50", + "metadata": { + "windows-startup-script-cmd": "winrm quickconfig -quiet & net user /add packer_user & net localgroup administrators packer_user /add & winrm set winrm/config/service/auth @{Basic=\"true\"}" + }, + "communicator": "winrm", + "winrm_username": "packer_user", + "winrm_insecure": true, + "winrm_use_ssl": true } ], - "provisioners":[ - { - "pause_before":"30s", - "type":"file", - "source":"windows/Packer.psm1", - "destination":"Documents/WindowsPowerShell/Modules/Packer/Packer.psm1" - }, + "provisioners": [ { - "type":"powershell", - "scripts":[ + "type": "powershell", + "scripts": [ + "windows/disable-uac.ps1", "windows/install-chocolatey.ps1" ] }, { - "type":"powershell", - "scripts":[ - "windows/install-doxygen.ps1" + "type": "file", + "source": "windows/init-ssh.ps1", + "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", + "only": [ + "amazon-ebs" ] }, { - "type":"file", - "source":"windows/init-ssh.ps1", - "destination":"$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1" - }, - { - "type":"powershell", - "scripts":[ + "type": "powershell", + "scripts": [ "windows/install-ssh.ps1" + ], + "only": [ + "amazon-ebs" ] }, { - "type":"powershell", - "scripts":[ 
- "windows/cleanup.ps1" + "type": "powershell", + "scripts": [ + "windows/setup-ec2launch.ps1" + ], + "only": [ + "amazon-ebs" ] }, { - "type":"powershell", - "scripts":[ - "windows/setup-ec2launch.ps1" + "type": "powershell", + "inline": [ + "GCESysprep -NoShutdown" ], - "only":[ - "amazon-ebs" + "only": [ + "googlecompute" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" ] } ] diff --git a/packer/windows-2016-vs-2017.json b/packer/windows-2016-vs-2017.json index 9ebe9c3728..34bf24c364 100644 --- a/packer/windows-2016-vs-2017.json +++ b/packer/windows-2016-vs-2017.json @@ -1,42 +1,62 @@ { - "variables":{ - "region":"us-west-2", - "source_ami":"ami-ac5395cc", - "source_image_name":"X.vmx", - "image_name":"windows-2016-vs-2017" + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "source_image": "", + "source_image_name": "", + "image_name": "windows-2016-vs-2017" }, - "builders":[ + "builders": [ { - "type":"amazon-ebs", - "instance_type":"t2.large", - "ami_name":"native-{{user `version`}}-{{user `image_name`}} {{timestamp}}", - "access_key":"{{user `aws_access_key`}}", - "secret_key":"{{user `aws_secret_key`}}", - "region":"{{user `region`}}", - "source_ami":"{{user `source_ami`}}", - "subnet_id":"{{user `subnet_id`}}", - "vpc_id":"{{user `vpc_id`}}", - "tags":{ - "team":"native", - "version":"{{user `version`}}", - "source_ami":"{{user `source_ami`}}" + "type": "amazon-ebs", + "instance_type": "c5d.2xlarge", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "access_key": "{{user `aws_access_key`}}", + "secret_key": "{{user `aws_secret_key`}}", + "region": "{{user `aws_region`}}", + "source_ami": "{{user `source_image`}}", + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_ami`}}" }, - "communicator":"ssh", - 
"ssh_username":"Administrator", - "ssh_handshake_attempts":100, - "ssh_timeout":"15m", - "launch_block_device_mappings":[ + "communicator": "ssh", + "ssh_username": "Administrator", + "ssh_handshake_attempts": 100, + "ssh_timeout": "15m", + "launch_block_device_mappings": [ { - "device_name":"/dev/sda1", - "delete_on_termination":true, - "volume_size":100 + "device_name": "/dev/sda1", + "delete_on_termination": true, + "volume_size": 100 } ] + }, + { + "type": "googlecompute", + "machine_type": "n1-standard-2", + "project_id": "{{user `googlecompute_project`}}", + "zone": "{{user `googlecompute_zone`}}", + "source_image": "{{user `source_image`}}", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}", + "source_ami": "{{user `source_image`}}" + }, + "disk_size": "50", + "communicator": "winrm", + "winrm_username": "packer_user", + "winrm_insecure": true, + "winrm_use_ssl": true } ], "provisioners": [ { - "pause_before":"60s", "type": "powershell", "scripts": [ "windows/install-vs-2017-community.ps1" @@ -44,23 +64,32 @@ }, { "type": "powershell", - "inline": [ - "Install-WindowsFeature -name NET-Framework-Core" + "inline": [ + "Install-WindowsFeature -name NET-Framework-Core" ] }, { "type": "powershell", "scripts": [ - "windows/cleanup.ps1" + "windows/setup-ec2launch.ps1" + ], + "only": [ + "amazon-ebs" ] }, { "type": "powershell", - "scripts": [ - "windows/setup-ec2launch.ps1" + "inline": [ + "GCESysprep -NoShutdown" ], "only": [ - "amazon-ebs" + "googlecompute" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" ] } ] diff --git a/packer/windows/Packer.psm1 b/packer/windows/Packer.psm1 deleted file mode 100644 index 6ca596229e..0000000000 --- a/packer/windows/Packer.psm1 +++ /dev/null @@ -1,233 +0,0 @@ -# Licensed to 
the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -Set-PSDebug -Trace 0 - -function Install-Package { - [CmdletBinding()] - param ( - [parameter(Mandatory=$false,Position=0)] - [string]$Uri, - - [parameter(Mandatory=$false)] - [string]$Installer, - - [parameter(Mandatory=$false)] - [string]$Log, - - [parameter(Mandatory=$false)] - [string[]]$ArgumentList=@(), - - [parameter(Mandatory=$false)] - [string]$Hash, - - [parameter(Mandatory=$false)] - [string]$DestinationPath, - - [parameter(Mandatory=$false)] - [System.Collections.IDictionary]$DownloadHeaders=@{}, - - [parameter(Mandatory=$false)] - [string]$DownloadMethod="Get", - - [parameter(Mandatory=$false)] - [string]$MsuPackage - ) - PROCESS { - Push-Location -Path $Env:temp - - if (-not $Installer) { - $Installer = $Env:temp + "\" + $Uri.Split('/')[-1] - } - - Write-Verbose "Install-Package: Uri=$Uri, Installer=$Installer, ArgumentList=$ArgumentList Log=$Log" - - if ($Uri) { - - if (!((Test-Path $Installer) -and ((Get-FileHash $Installer).hash -eq "$Hash"))) { - - if ($DownloadMethod -eq 'GET') { - $ds = New-Object psobject -Property @{downloadProgress = 0; downloadComplete = $false; error = 0} - - $wc = New-Object System.Net.WebClient - - if ($DownloadHeaders.Count -gt 0) { - $a = 
$DownloadHeaders.GetEnumerator() | % { "$($_.Name):$($_.Value)" } - $wc.Headers.Add($a) - } - - $eventDataComplete = Register-ObjectEvent $wc DownloadFileCompleted ` - -MessageData $ds ` - -Action { - $event.MessageData.downloadComplete = $true - $event.MessageData.error = $EventArgs.Error - } - - $eventDataProgress = Register-ObjectEvent $wc DownloadProgressChanged ` - -MessageData $ds ` - -Action { - $event.MessageData.downloadProgress = $EventArgs.ProgressPercentage - } - - while ($true) { - $ds.error = 0 - $ds.downloadComplete = $false - $ds.downloadProgress = 0 - - try { - $wc.DownloadFileAsync($Uri, $Installer) - } catch { - Write-Host $_.Exception.Message - } - - - $p = 0; - while (!$ds.downloadComplete) { - if ($ds.downloadProgress -gt $p) { - $p = $ds.downloadProgress; - Write-Host "Downloading... ($($ds.downloadProgress)%)" - Start-Sleep -m 100 - } - } - if ($ds.error) { - Write-Host "Error: $($ds.error)" - } else { - break; - } - } - } else { - # POST - Invoke-WebRequest -Uri $Uri ` - -Headers $DownloadHeaders ` - -Method $DownloadMethod ` - -OutFile $Installer - } - } - } - - Write-Host "Installing..." 
- if ($Installer -match "\.msi$") { - Write-Verbose "Installing via MSI" - $Log = "$Installer.log" - $ArgumentList = @("/package", $Installer, "/quiet", "/log", "$Log") + $ArgumentList - $Installer = "msiexec"; - } elseif ($Installer -match "\.msu$") { - Write-Verbose "Installing via MSU" - $Log = "$Installer.log" - Start-Process -FilePath "wusa" -ArgumentList @($Installer, "/extract:.") - $ArgumentList = @("/Online", "/Add-Package", "/NoRestart", "/PackagePath:$MsuPackage") + $ArgumentList - $Installer = "dism"; - } elseif ($Installer -match "\.zip$") { - Write-Verbose "Installing via ZIP" - Expand-Archive -Path $Installer -DestinationPath $DestinationPath -Force -Verbose - $Installer = ""; - } - - if ($Installer) { - Write-Verbose "Installer=$Installer, ArgumentList=$ArgumentList" - $ip = Start-Process -FilePath $Installer -ArgumentList $ArgumentList -NoNewWindow -PassThru - if (!$ip) { - throw "Error starting installer. Installer=$Installer, ArgumentList=$ArgumentList" - } - $handle = $ip.Handle - - if ($log) { - $lp = Start-Process -FilePath powershell.exe -ArgumentList @("-Command", "& {Import-Module Packer; Get-Tail -FilePath $Log -Follow}") -NoNewWindow -PassThru - #$lp= &{ Tail-File $Log -Follow } - #$lp - } - - while(-not $ip.HasExited) { - Write-Host -NoNewline '.' -# if ($Log) { -# $c = Get-Content -Path $Log -Tail 1 -# Write-Host ">> $c" -# } - sleep 1 - } - - $lp | Stop-Process -ErrorAction SilentlyContinue - - Write-Verbose "Exit Code: $($ip.ExitCode)" - if ($ip.ExitCode -eq 0) { - Write-Host "Installation complete." - } elseif ($ip.ExitCode -eq 3010) { - Write-Host "Restart required to complete installation." - } else { - throw "Error while installing. Installer exit code $($ip.ExitCode). 
Installer=$Installer, ArgumentList=$ArgumentList" - } - } - - Pop-Location - } -} - -function Get-Tail { - [CmdletBinding()] - param ( - [parameter(Mandatory=$true,Position=0)] - [string]$FilePath, - - [parameter(Mandatory=$false)] - [int]$Offset, - - [parameter(Mandatory=$false)] - [switch]$Follow - ) - PROCESS { - Write-Verbose "Tail-File: FilePath=$FilePath, Follow=$Follow" - - while (1) { - $ci = get-childitem $FilePath -ErrorAction SilentlyContinue - if ($ci) { break } - Start-Sleep -m 100 - } - - $fullName = $ci.FullName - - $reader = new-object System.IO.StreamReader(New-Object IO.FileStream($fullName, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [IO.FileShare]::ReadWrite)) - #start at the end of the file - $lastMaxOffset = $reader.BaseStream.Length - $Offset - - while ($true) - { - $reader = new-object System.IO.StreamReader(New-Object IO.FileStream($fullName, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [IO.FileShare]::ReadWrite)) - #if the file size has not changed, idle - if ($reader.BaseStream.Length -ge $lastMaxOffset) { - #seek to the last max offset - $reader.BaseStream.Seek($lastMaxOffset, [System.IO.SeekOrigin]::Begin) | out-null - - #read out of the file until the EOF - $line = "" - while (($line = $reader.ReadLine()) -ne $null) { - write-output $line - } - - #update the last max offset - $lastMaxOffset = $reader.BaseStream.Position - } elseif ($reader.BaseStream.Length -lt $lastMaxOffset) { - write-output "File truncated" - $lastMaxOffset = 0; - } - - if($Follow){ - Start-Sleep -m 100 - } else { - break; - } - } - - } -} \ No newline at end of file diff --git a/packer/windows/add-user-build.ps1 b/packer/windows/add-user-build.ps1 deleted file mode 100644 index 31606d686f..0000000000 --- a/packer/windows/add-user-build.ps1 +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -$user = "build" -$pass = "p1votal!" - -net.exe user $user $pass /add -net.exe localgroup Administrators $user /add -wmic.exe UserAccount where "Name='$user'" set PasswordExpires=False - -$spw = ConvertTo-SecureString $pass -AsPlainText -Force -$cred = New-Object System.Management.Automation.PSCredential -ArgumentList $user,$spw -Start-Process cmd /c -WindowStyle Hidden -Credential $cred -ErrorAction SilentlyContinue - - -schtasks.exe /Create /TN init-user-build /RU SYSTEM /SC ONSTART /TR "powershell.exe -File 'C:\Users\build\init-user-build.ps1'" - diff --git a/packer/windows/install-activeperl.ps1 b/packer/windows/disable-uac.ps1 similarity index 69% rename from packer/windows/install-activeperl.ps1 rename to packer/windows/disable-uac.ps1 index 0dc4157e4c..932fef960f 100644 --- a/packer/windows/install-activeperl.ps1 +++ b/packer/windows/disable-uac.ps1 @@ -4,9 +4,9 @@ # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. 
You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -14,6 +14,10 @@ # limitations under the License. $ErrorActionPreference = "Stop" -Import-Module Packer -Force -Install-Package http://downloads.activestate.com/ActivePerl/releases/5.28.1.2801/ActivePerl-5.28.1.2801-MSWin32-x64-24563874.exe -ArgumentList "/exenoupdates /quiet" +Write-Host "Disabling UAC..." + +New-ItemProperty -Path HKLM:Software\Microsoft\Windows\CurrentVersion\Policies\System -Name EnableLUA -PropertyType DWord -Value 0 -Force +New-ItemProperty -Path HKLM:Software\Microsoft\Windows\CurrentVersion\Policies\System -Name ConsentPromptBehaviorAdmin -PropertyType DWord -Value 0 -Force + +Write-Host "Disabled UAC." diff --git a/packer/windows/enable-winrm.ps1 b/packer/windows/enable-winrm.ps1 new file mode 100644 index 0000000000..a694e06951 --- /dev/null +++ b/packer/windows/enable-winrm.ps1 @@ -0,0 +1,36 @@ + + +write-output "Running User Data Script" +write-host "(host) Running User Data Script" + +Set-ExecutionPolicy Unrestricted -Scope LocalMachine -Force -ErrorAction Ignore + +# Don't set this before Set-ExecutionPolicy as it throws an error +$ErrorActionPreference = "stop" + +# Remove HTTP listener +Remove-Item -Path WSMan:\Localhost\listener\listener* -Recurse + +$Cert = New-SelfSignedCertificate -CertstoreLocation Cert:\LocalMachine\My -DnsName "packer" +New-Item -Path WSMan:\LocalHost\Listener -Transport HTTPS -Address * -CertificateThumbPrint $Cert.Thumbprint -Force + +# WinRM +write-output "Setting up WinRM" +write-host "(host) setting up WinRM" + +cmd.exe /c winrm quickconfig -q +cmd.exe /c winrm set "winrm/config" '@{MaxTimeoutms="1800000"}' +cmd.exe /c winrm set "winrm/config/winrs" '@{MaxMemoryPerShellMB="8192"}' +cmd.exe /c winrm 
set "winrm/config/service" '@{AllowUnencrypted="true"}' +cmd.exe /c winrm set "winrm/config/client" '@{AllowUnencrypted="true"}' +cmd.exe /c winrm set "winrm/config/service/auth" '@{Basic="true"}' +cmd.exe /c winrm set "winrm/config/client/auth" '@{Basic="true"}' +cmd.exe /c winrm set "winrm/config/service/auth" '@{CredSSP="true"}' +cmd.exe /c winrm set "winrm/config/listener?Address=*+Transport=HTTPS" "@{Port=`"5986`";Hostname=`"packer`";CertificateThumbprint=`"$($Cert.Thumbprint)`"}" +cmd.exe /c netsh advfirewall firewall set rule group="remote administration" new enable=yes +cmd.exe /c netsh firewall add portopening TCP 5986 "Port 5986" +cmd.exe /c net stop winrm +cmd.exe /c sc config winrm start= auto +cmd.exe /c net start winrm + + \ No newline at end of file diff --git a/packer/windows/init-user-build.ps1 b/packer/windows/init-user-build.ps1 deleted file mode 100644 index 20fd98b083..0000000000 --- a/packer/windows/init-user-build.ps1 +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -$ssh = "C:\Users\build\.ssh" -$authorized_keys = "$ssh\authorized_keys" -if ( -not (Test-Path $authorized_keys -PathType Leaf) ) { - - write-host "Installing SSH authorized key" - - mkdir -p $ssh -ErrorAction SilentlyContinue - - Invoke-WebRequest -Uri 'http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key' -OutFile $authorized_keys - - # Give sshd permission to read authorized_keys - Import-Module 'C:\Program Files\OpenSSH-Win64\OpenSSHUtils' -force - - $currentUserSid = Get-UserSID -User "NT SERVICE\sshd" - $account = Get-UserAccount -UserSid $currentUserSid - $ace = New-Object System.Security.AccessControl.FileSystemAccessRule ` - ($account, "Read", "None", "None", "Allow") - $acl = Get-Acl $authorized_keys - $acl.AddAccessRule($ace) - Enable-Privilege SeRestorePrivilege | out-null - Set-Acl -Path $authorized_keys -AclObject $acl -Confirm:$false - -} diff --git a/packer/windows/install-chocolatey.ps1 b/packer/windows/install-chocolatey.ps1 index 60bdd85022..f86f6ad876 100644 --- a/packer/windows/install-chocolatey.ps1 +++ b/packer/windows/install-chocolatey.ps1 @@ -15,11 +15,9 @@ $ErrorActionPreference = "Stop" -write-host "Installing Chocolatey" +write-host "Installing Chocolatey..." -# Avoid bug in 7zip when running via WinRM -$Env:chocolateyUseWindowsCompression = $true +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) -iwr https://chocolatey.org/install.ps1 | iex - -write-host "Chocolatey Installed" +write-host "Installed Chocolatey." diff --git a/packer/windows/install-dependencies-app-from-github.ps1 b/packer/windows/install-dependencies-app-from-github.ps1 deleted file mode 100644 index 58580d4181..0000000000 --- a/packer/windows/install-dependencies-app-from-github.ps1 +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -$repo = "lucasg/Dependencies" -$file = "Dependencies.zip" - -$releases = "https://api.github.com/repos/$repo/releases" - -[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 - -Write-Host Determining latest release -$tag = (Invoke-WebRequest $releases | ConvertFrom-Json)[0].tag_name - -$download = "https://github.com/$repo/releases/download/$tag/$file" -$name = $file.Split(".")[0] -$zip = "c:\$name-$tag.zip" - -Write-Host Downloading latest release -Invoke-WebRequest $download -Out $zip - -Write-Host Extracting release files -Expand-Archive -Path $zip -DestinationPath c:\Users\Administrator\Desktop\Dependencies-$tag -Force - -# Removing temp files -Remove-Item $zip -Force diff --git a/packer/windows/install-dependencies.ps1 b/packer/windows/install-dependencies.ps1 deleted file mode 100644 index 2485c6e34a..0000000000 --- a/packer/windows/install-dependencies.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -choco install adoptopenjdk8 -confirm -choco install cmake.portable -confirm -choco install git.install -confirm -choco install dogtail.dotnet3.5sp1 -confirm -choco install nunit.install --version 2.6.4 -confirm -choco install netfx-4.5.2-devpack --allowEmptyChecksums -confirm -choco install nsis -confirm -choco install patch -confirm -choco install gnuwin32-coreutils.portable -confirm -choco install nuget.commandline -confirm diff --git a/packer/windows/install-doxygen.ps1 b/packer/windows/install-doxygen.ps1 deleted file mode 100644 index c1337da88d..0000000000 --- a/packer/windows/install-doxygen.ps1 +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -$package = 'doxygen.install' -$url = 'http://doxygen.nl/files/doxygen-1.8.16-setup.exe' -$sha256 = 'c0d4bb19e87921b4aad2d0962bac1f6664bfb9d0f103658908af76565386c940' - -Import-Module C:\ProgramData\chocolatey\helpers\chocolateyInstaller.psm1 -Install-ChocolateyPackage $package 'exe' '/VERYSILENT' $url -Checksum $sha256 -ChecksumType 'sha256' - -$oldpath = (Get-ItemProperty -Path 'Registry::HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment' -Name PATH).path -$newpath = "$oldpath;C:\Program Files\doxygen\bin" -Set-ItemProperty -Path 'Registry::HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment' -Name PATH -Value $newPath -$ENV:PATH=$newpath diff --git a/packer/windows/install-gemfire.ps1 b/packer/windows/install-gemfire.ps1 deleted file mode 100644 index 1ea3e26a1b..0000000000 --- a/packer/windows/install-gemfire.ps1 +++ /dev/null @@ -1,21 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -$ErrorActionPreference = "Stop" - -mkdir C:\gemfire -cd C:\gemfire -cmake -E tar zxf $Home\gemfire.tar.gz -rm $Home\gemfire.tar.gz diff --git a/packer/windows/install-openssl.ps1 b/packer/windows/install-geode.ps1 similarity index 59% rename from packer/windows/install-openssl.ps1 rename to packer/windows/install-geode.ps1 index f20f510411..de2a58506e 100644 --- a/packer/windows/install-openssl.ps1 +++ b/packer/windows/install-geode.ps1 @@ -14,6 +14,18 @@ # limitations under the License. $ErrorActionPreference = "Stop" -Import-Module Packer -Force -Install-Package https://slproweb.com/download/Win64OpenSSL-1_1_1d.exe -ArgumentList /silent +write-host "Installing Geode..." + +$GEODE_VERSION = "1.12.0" + +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +Invoke-WebRequest -Uri "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" -OutFile "${env:TEMP}\geode.tgz" + +cd \ +cmake -E tar zxf "${env:TEMP}\geode.tgz" +rm "${env:TEMP}\geode.tgz" + +[System.Environment]::SetEnvironmentVariable('GEODE_HOME', "C:\apache-geode-${GEODE_VERSION}", [System.EnvironmentVariableTarget]::Machine) + +write-host "Installed Geode." diff --git a/packer/windows/install-ssh.ps1 b/packer/windows/install-ssh.ps1 index 0b4165b7c5..bb2e4a5084 100644 --- a/packer/windows/install-ssh.ps1 +++ b/packer/windows/install-ssh.ps1 @@ -17,10 +17,12 @@ $ErrorActionPreference = "Stop" -write-host "Installing OpenSSH" +write-host "Installing OpenSSH..." + choco install openssh -params '/SSHServerFeature' -confirm (Get-Content -Path $Env:ProgramData\ssh\sshd_config -Raw) -replace '.*Match Group administrators.*','' -replace '.*administrators_authorized_keys.*','' | Set-Content -Path $Env:ProgramData\ssh\sshd_config schtasks.exe /Create /TN init-ssh /RU SYSTEM /SC ONSTART /TR "powershell.exe -File '${Env:ProgramData}\Amazon\EC2-Windows\Launch\Scripts\init-ssh.ps1'" +write-host "Installed OpenSSH." 
diff --git a/packer/windows/install-vs-2017-community.ps1 b/packer/windows/install-vs-2017-community.ps1 index 8c0e196f74..c7bad41ba9 100644 --- a/packer/windows/install-vs-2017-community.ps1 +++ b/packer/windows/install-vs-2017-community.ps1 @@ -15,13 +15,11 @@ # TODO AdminDeploy.xml # vs_community.exe /AdminFile C:\Users\Administrator\AdminDeployment.xml /Log setup.log /Passive -Set-PSDebug -Trace 0 +$ErrorActionPreference = "Stop" -$vs_community_bootstrapper_uri = "https://download.visualstudio.microsoft.com/download/pr/5df30b3f-9db2-4195-bce3-c5518277da5d/18edc9dd7697111f993c5c06f18b51e5/vs_community.exe" -$vs_community_bootstrapper = "C:\vs_community.exe" +write-host "Installing Visual Studio 2017 Community..." -$args = @('--installPath "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community"' - '--add microsoft.net.component.4.targetingpack' +$args = @('--add microsoft.net.component.4.targetingpack' '--add microsoft.net.component.4.5.1.targetingpack' '--add microsoft.visualstudio.component.debugger.justintime' '--add microsoft.visualstudio.component.web' @@ -43,8 +41,9 @@ $args = @('--installPath "C:\Program Files (x86)\Microsoft Visual Studio\2017\Co '--quiet' ) -Invoke-WebRequest -Uri $vs_community_bootstrapper_uri -OutFile $vs_community_bootstrapper +choco install visualstudio2017community -confirm --package-parameters "$args" -Start-Process -Filepath $vs_community_bootstrapper -ArgumentList $args -Wait +write-host "Installed Visual Studio 2017 Community." +# Avoids reboot error code Exit 0 diff --git a/packer/windows/setup-ec2config.ps1 b/packer/windows/setup-ec2config.ps1 deleted file mode 100644 index 04124b183c..0000000000 --- a/packer/windows/setup-ec2config.ps1 +++ /dev/null @@ -1,33 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Enable the system password to be retrieved from the AWS Console after this AMI is built and used to launch code -$ec2config = [xml] (get-content 'C:\Program Files\Amazon\Ec2ConfigService\Settings\config.xml') -($ec2config.ec2configurationsettings.plugins.plugin | where {$_.name -eq "Ec2SetPassword"}).state = "Enabled" -($ec2config.ec2configurationsettings.plugins.plugin | where {$_.name -eq "Ec2DynamicBootVolumeSize"}).state = "Enabled" -($ec2config.ec2configurationsettings.plugins.plugin | where {$_.name -eq "Ec2HandleUserData"}).state = "Enabled" -($ec2config.ec2configurationsettings.plugins.plugin | where {$_.name -eq "Ec2SetComputerName"}).state = "Enabled" -($ec2config.ec2configurationsettings.plugins.plugin | where {$_.name -eq "Ec2EventLog"}).state = "Enabled" -$ec2config.save("C:\Program Files\Amazon\Ec2ConfigService\Settings\config.xml") -$ec2DiskConfig = [xml] (get-content 'C:\Program Files\Amazon\Ec2ConfigService\Settings\DriveLetterConfig.xml') -$mappingElement = $ec2DiskConfig.SelectNodes("DriveLetterMapping")[0].Mapping; -if (!$mappingElement) { - $mappingElement = $ec2DiskConfig.SelectNodes("DriveLetterMapping")[0].AppendChild($ec2DiskConfig.CreateElement("Mapping")) - $volumeNameElement = $mappingElement.AppendChild($ec2DiskConfig.CreateElement("VolumeName")) - $volumeName = $volumeNameElement.AppendChild($ec2DiskConfig.CreateTextNode("Temporary Storage 0")) - $driveLetterElement 
= $mappingElement.AppendChild($ec2DiskConfig.CreateElement("DriveLetter")) - $driveLetter = $driveLetterElement.AppendChild($ec2DiskConfig.CreateTextNode("D:")) - $ec2DiskConfig.save("C:\Program Files\Amazon\Ec2ConfigService\Settings\DriveLetterConfig.xml") -} \ No newline at end of file diff --git a/packer/windows/uninstall-doxygen.ps1 b/packer/windows/uninstall-doxygen.ps1 deleted file mode 100644 index 5175f1d381..0000000000 --- a/packer/windows/uninstall-doxygen.ps1 +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Enable the system password to be retrieved from the AWS Console after this AMI is built and used to launch code - -$package = 'doxygen.install' -$uninstallRegKey = 'HKLM:SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\doxygen_is1' - -Import-Module C:\ProgramData\chocolatey\helpers\chocolateyInstaller.psm1 -$uninstallPath = (Get-ItemProperty $uninstallRegKey UninstallString).UninstallString -Uninstall-ChocolateyPackage $package 'exe' '/VERYSILENT' $uninstallPath diff --git a/packer/windows/vs-2015-admin.xml b/packer/windows/vs-2015-admin.xml deleted file mode 100644 index 867ff0f400..0000000000 --- a/packer/windows/vs-2015-admin.xml +++ /dev/null @@ -1,100 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/packer/windows/winrm.cloud-init b/packer/windows/winrm.cloud-init deleted file mode 100644 index 8195a8e0ce..0000000000 --- a/packer/windows/winrm.cloud-init +++ /dev/null @@ -1,26 +0,0 @@ - -$ErrorActionPreference = "Stop"; - -Set-ExecutionPolicy Unrestricted - -write-host "Setting up WinRM" - -cmd /c winrm quickconfig -q -cmd /c winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="8000"}' -cmd /c winrm set winrm/config/winrs '@{MaxShellsPerUser="5"}' -cmd /c winrm set winrm/config '@{MaxTimeoutms="1800000"}' -cmd /c winrm set winrm/config/service '@{AllowUnencrypted="true"}' -cmd /c winrm set winrm/config/service/auth '@{Basic="true"}' - -cmd /c net stop winrm -# Will restart on reboot - -cmd /c netsh advfirewall firewall add rule name="WinRM 5985" protocol=TCP dir=in localport=5985 action=allow -cmd /c netsh advfirewall firewall add rule name="WinRM 5986" protocol=TCP dir=in localport=5986 action=allow - -cmd /c sc config winrm start= auto - -write-host "WinRM ready" -cmd /c net start winrm - - \ No newline at end of file From d2c71f659ff2377185535b4c0e68aa11d3b37134 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 6 Jun 2020 10:23:17 -0700 Subject: [PATCH 003/155] Use AWS creds form env or file. 
--- packer/build-rhel-7.json | 2 -- packer/build-rhel-8.json | 2 -- packer/build-ubuntu-16.04.json | 2 -- packer/build-ubuntu-18.04.json | 2 -- packer/build-windows-vs-2017.json | 2 -- packer/windows-2016-base.json | 2 -- packer/windows-2016-vs-2017.json | 2 -- 7 files changed, 14 deletions(-) diff --git a/packer/build-rhel-7.json b/packer/build-rhel-7.json index f5726e22c2..f2ed452c75 100644 --- a/packer/build-rhel-7.json +++ b/packer/build-rhel-7.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "t2.micro", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/build-rhel-8.json b/packer/build-rhel-8.json index b241987e16..8376c10fd4 100644 --- a/packer/build-rhel-8.json +++ b/packer/build-rhel-8.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "t2.micro", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/build-ubuntu-16.04.json b/packer/build-ubuntu-16.04.json index 77e51357f7..b947eee912 100644 --- a/packer/build-ubuntu-16.04.json +++ b/packer/build-ubuntu-16.04.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "t2.micro", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/build-ubuntu-18.04.json b/packer/build-ubuntu-18.04.json index 
7c10e060ee..bf1b044b24 100644 --- a/packer/build-ubuntu-18.04.json +++ b/packer/build-ubuntu-18.04.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "t2.micro", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/build-windows-vs-2017.json b/packer/build-windows-vs-2017.json index c36e27f387..699360d587 100644 --- a/packer/build-windows-vs-2017.json +++ b/packer/build-windows-vs-2017.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "c5d.2xlarge", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/windows-2016-base.json b/packer/windows-2016-base.json index 52494f0137..955af814ca 100644 --- a/packer/windows-2016-base.json +++ b/packer/windows-2016-base.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "c5d.2xlarge", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - "secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", diff --git a/packer/windows-2016-vs-2017.json b/packer/windows-2016-vs-2017.json index 34bf24c364..13d55575d8 100644 --- a/packer/windows-2016-vs-2017.json +++ b/packer/windows-2016-vs-2017.json @@ -12,8 +12,6 @@ "type": "amazon-ebs", "instance_type": "c5d.2xlarge", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "access_key": "{{user `aws_access_key`}}", - 
"secret_key": "{{user `aws_secret_key`}}", "region": "{{user `aws_region`}}", "source_ami": "{{user `source_image`}}", "subnet_id": "{{user `subnet_id`}}", From 45a61ae3c0ca538527951c6c4ab3eca35dd4a855 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 6 Jun 2020 13:49:53 -0700 Subject: [PATCH 004/155] Finds source image by family or matching. --- packer/build-rhel-7.json | 24 ++++++++++++++++-------- packer/build-rhel-8.json | 24 ++++++++++++++++-------- packer/build-ubuntu-16.04.json | 23 +++++++++++++++-------- packer/build-ubuntu-18.04.json | 23 +++++++++++++++-------- packer/build-windows-vs-2017.json | 23 +++++++++++++++-------- packer/windows-2016-base.json | 25 ++++++++++++++++--------- packer/windows-2016-vs-2017.json | 25 ++++++++++++++++--------- 7 files changed, 109 insertions(+), 58 deletions(-) diff --git a/packer/build-rhel-7.json b/packer/build-rhel-7.json index f2ed452c75..657631daa3 100644 --- a/packer/build-rhel-7.json +++ b/packer/build-rhel-7.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "build-rhel-7" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "RHEL-7.7_HVM-*-x86_64-*-Hourly2-GP2", + "root-device-type": "ebs" + }, + "owners": [ + "309956199498" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "ssh_username": "ec2-user", "ssh_pty": true @@ -29,14 +37,14 @@ "machine_type": "n1-standard-1", "project_id": 
"{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "rhel-7", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, + "disk_size": "20", "ssh_username": "packer" } ], diff --git a/packer/build-rhel-8.json b/packer/build-rhel-8.json index 8376c10fd4..a7c108d183 100644 --- a/packer/build-rhel-8.json +++ b/packer/build-rhel-8.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "build-rhel-8" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "RHEL-8.2.0_HVM-*-x86_64-*-Hourly2-GP2", + "root-device-type": "ebs" + }, + "owners": [ + "309956199498" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "ssh_username": "ec2-user", "ssh_pty": true @@ -29,14 +37,14 @@ "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "rhel-8", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": 
"{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, + "disk_size": "20", "ssh_username": "packer" } ], diff --git a/packer/build-ubuntu-16.04.json b/packer/build-ubuntu-16.04.json index b947eee912..c5dcb18dfb 100644 --- a/packer/build-ubuntu-16.04.json +++ b/packer/build-ubuntu-16.04.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "build-ubuntu-16-04" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "ubuntu/images/hvm-ssd/ubuntu-xenial-16.04-amd64-server-*", + "root-device-type": "ebs" + }, + "owners": [ + "099720109477" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "ssh_username": "ubuntu", "ssh_pty": true @@ -29,13 +37,12 @@ "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "ubuntu-1604-lts", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": 
"{{user `version`}}" }, "ssh_username": "packer" } diff --git a/packer/build-ubuntu-18.04.json b/packer/build-ubuntu-18.04.json index bf1b044b24..90198d6ddd 100644 --- a/packer/build-ubuntu-18.04.json +++ b/packer/build-ubuntu-18.04.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "build-ubuntu-18-04" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-*", + "root-device-type": "ebs" + }, + "owners": [ + "099720109477" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "ssh_username": "ubuntu", "ssh_pty": true @@ -29,13 +37,12 @@ "machine_type": "n1-standard-1", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "ubuntu-1804-lts", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "ssh_username": "packer" } diff --git a/packer/build-windows-vs-2017.json b/packer/build-windows-vs-2017.json index 699360d587..2c1d70cf4c 100644 --- a/packer/build-windows-vs-2017.json +++ 
b/packer/build-windows-vs-2017.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "build-windows-vs-2017" }, "builders": [ { "type": "amazon-ebs", "instance_type": "c5d.2xlarge", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "{{user `product_name`}}-{{user `version`}}-windows-2016-vs-2017-*", + "root-device-type": "ebs" + }, + "owners": [ + "self" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_ami`}}" + "version": "{{user `version`}}" }, "communicator": "ssh", "ssh_username": "Administrator", @@ -38,13 +46,12 @@ "machine_type": "n1-standard-2", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "{{user `product_name`}}-{{user `version`}}-windows-2016-vs-2017", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "disk_size": "100", "communicator": "winrm", diff --git a/packer/windows-2016-base.json b/packer/windows-2016-base.json index 955af814ca..43d2b1e0a3 100644 --- a/packer/windows-2016-base.json +++ b/packer/windows-2016-base.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - 
"source_image_name": "", "image_name": "windows-2016-base" }, "builders": [ { "type": "amazon-ebs", "instance_type": "c5d.2xlarge", + "ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "Windows_Server-2016-English-Full-Base-*", + "root-device-type": "ebs" + }, + "owners": [ + "amazon" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_ami`}}" + "version": "{{user `version`}}" }, "user_data_file": "windows/enable-winrm.ps1", "communicator": "winrm", @@ -29,16 +37,15 @@ }, { "type": "googlecompute", - "machine_type": "n1-standard-1", + "machine_type": "n1-standard-2", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "windows-2016", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "disk_size": "50", "metadata": { diff --git a/packer/windows-2016-vs-2017.json b/packer/windows-2016-vs-2017.json index 13d55575d8..1c1a933d07 100644 --- a/packer/windows-2016-vs-2017.json +++ b/packer/windows-2016-vs-2017.json @@ -3,23 +3,31 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "source_image": "", - "source_image_name": "", "image_name": "windows-2016-vs-2017" }, "builders": [ { "type": "amazon-ebs", "instance_type": "c5d.2xlarge", + 
"ami_virtualization_type": "hvm", "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "region": "{{user `aws_region`}}", - "source_ami": "{{user `source_image`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "{{user `product_name`}}-{{user `version`}}-windows-2016-base-*", + "root-device-type": "ebs" + }, + "owners": [ + "self" + ], + "most_recent": true + }, "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_ami`}}" + "version": "{{user `version`}}" }, "communicator": "ssh", "ssh_username": "Administrator", @@ -35,16 +43,15 @@ }, { "type": "googlecompute", - "machine_type": "n1-standard-2", + "machine_type": "n1-standard-8", "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", - "source_image": "{{user `source_image`}}", + "source_image_family": "{{user `product_name`}}-{{user `version`}}-windows-2016-base", "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", "image_labels": { "product": "{{user `product_name`}}", - "version": "{{user `version`}}", - "source_ami": "{{user `source_image`}}" + "version": "{{user `version`}}" }, "disk_size": "50", "communicator": "winrm", From 12e8493bc2b91f35e3a066d48a2ed0ec33879ed1 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 7 Jun 2020 14:06:50 -0700 Subject: [PATCH 005/155] Cleanup simplify Windows image. 
--- ...e.json => build-windows-2016-vs-2017.json} | 49 ++++++-- packer/build-windows-vs-2017.json | 110 ------------------ packer/windows-2016-vs-2017.json | 101 ---------------- packer/windows/install-vs-2017-community.ps1 | 30 ++--- 4 files changed, 53 insertions(+), 237 deletions(-) rename packer/{windows-2016-base.json => build-windows-2016-vs-2017.json} (68%) delete mode 100644 packer/build-windows-vs-2017.json delete mode 100644 packer/windows-2016-vs-2017.json diff --git a/packer/windows-2016-base.json b/packer/build-windows-2016-vs-2017.json similarity index 68% rename from packer/windows-2016-base.json rename to packer/build-windows-2016-vs-2017.json index 43d2b1e0a3..d9d85f49c8 100644 --- a/packer/windows-2016-base.json +++ b/packer/build-windows-2016-vs-2017.json @@ -3,7 +3,7 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "windows-2016-base" + "image_name": "build-windows-2016-vs-2017" }, "builders": [ { @@ -29,11 +29,17 @@ "product": "{{user `product_name`}}", "version": "{{user `version`}}" }, - "user_data_file": "windows/enable-winrm.ps1", - "communicator": "winrm", - "winrm_username": "Administrator", - "winrm_insecure": true, - "winrm_use_ssl": true + "communicator": "ssh", + "ssh_username": "Administrator", + "ssh_handshake_attempts": 100, + "ssh_timeout": "15m", + "launch_block_device_mappings": [ + { + "device_name": "/dev/sda1", + "delete_on_termination": true, + "volume_size": 100 + } + ] }, { "type": "googlecompute", @@ -47,7 +53,7 @@ "product": "{{user `product_name`}}", "version": "{{user `version`}}" }, - "disk_size": "50", + "disk_size": "100", "metadata": { "windows-startup-script-cmd": "winrm quickconfig -quiet & net user /add packer_user & net localgroup administrators packer_user /add & winrm set winrm/config/service/auth @{Basic=\"true\"}" }, @@ -82,6 +88,35 @@ "amazon-ebs" ] }, + { + "type": "powershell", + "scripts": [ + "windows/install-vs-2017-community.ps1" + ] + }, + { + "type": 
"powershell", + "inline": [ + "choco install git.install -confirm", + "choco install cmake.portable -confirm", + "# TODO Old CLI tests aren't compatible with Java 11", + "choco install liberica8jdk -confirm", + "choco install doxygen.install -confirm", + "choco install openssl -confirm", + "choco install strawberryperl -confirm", + "choco install nuget.commandline -confirm", + "# For NUnit 2.6", + "choco install dotnet3.5 -confirm", + "# TODO make this a nuget dependency", + "choco install nunit.install --version 2.6.4 -confirm" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-geode.ps1" + ] + }, { "type": "powershell", "scripts": [ diff --git a/packer/build-windows-vs-2017.json b/packer/build-windows-vs-2017.json deleted file mode 100644 index 2c1d70cf4c..0000000000 --- a/packer/build-windows-vs-2017.json +++ /dev/null @@ -1,110 +0,0 @@ -{ - "variables": { - "aws_region": "", - "googlecompute_zone": "", - "googlecompute_project": "", - "image_name": "build-windows-vs-2017" - }, - "builders": [ - { - "type": "amazon-ebs", - "instance_type": "c5d.2xlarge", - "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "region": "{{user `aws_region`}}", - "source_ami_filter": { - "filters": { - "virtualization-type": "hvm", - "name": "{{user `product_name`}}-{{user `version`}}-windows-2016-vs-2017-*", - "root-device-type": "ebs" - }, - "owners": [ - "self" - ], - "most_recent": true - }, - "subnet_id": "{{user `subnet_id`}}", - "vpc_id": "{{user `vpc_id`}}", - "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" - }, - "communicator": "ssh", - "ssh_username": "Administrator", - "ssh_handshake_attempts": 100, - "ssh_timeout": "15m", - "launch_block_device_mappings": [ - { - "device_name": "/dev/sda1", - "delete_on_termination": true, - "volume_size": 100 - } - ] - }, - { - "type": "googlecompute", - "machine_type": "n1-standard-2", - 
"project_id": "{{user `googlecompute_project`}}", - "zone": "{{user `googlecompute_zone`}}", - "source_image_family": "{{user `product_name`}}-{{user `version`}}-windows-2016-vs-2017", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", - "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" - }, - "disk_size": "100", - "communicator": "winrm", - "winrm_username": "packer_user", - "winrm_insecure": true, - "winrm_use_ssl": true - } - ], - "provisioners": [ - { - "type": "powershell", - "inline": [ - "choco install liberica11jdk -confirm", - "choco install cmake.portable -confirm", - "choco install git.install -confirm", - "choco install dogtail.dotnet3.5sp1 -confirm", - "choco install nunit.install --version 2.6.4 -confirm", - "choco install netfx-4.5.2-devpack --allowEmptyChecksums -confirm", - "choco install nuget.commandline -confirm", - "choco install doxygen.install -confirm", - "choco install openssl -confirm", - "choco install strawberryperl -confirm" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/install-geode.ps1" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/setup-ec2launch.ps1" - ], - "only": [ - "amazon-ebs" - ] - }, - { - "type": "powershell", - "inline": [ - "GCESysprep -NoShutdown" - ], - "only": [ - "googlecompute" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/cleanup.ps1" - ] - } - ] -} diff --git a/packer/windows-2016-vs-2017.json b/packer/windows-2016-vs-2017.json deleted file mode 100644 index 1c1a933d07..0000000000 --- a/packer/windows-2016-vs-2017.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "variables": { - "aws_region": "", - "googlecompute_zone": "", - "googlecompute_project": "", - "image_name": "windows-2016-vs-2017" - }, - "builders": [ - { - "type": "amazon-ebs", - "instance_type": "c5d.2xlarge", - 
"ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "region": "{{user `aws_region`}}", - "source_ami_filter": { - "filters": { - "virtualization-type": "hvm", - "name": "{{user `product_name`}}-{{user `version`}}-windows-2016-base-*", - "root-device-type": "ebs" - }, - "owners": [ - "self" - ], - "most_recent": true - }, - "subnet_id": "{{user `subnet_id`}}", - "vpc_id": "{{user `vpc_id`}}", - "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" - }, - "communicator": "ssh", - "ssh_username": "Administrator", - "ssh_handshake_attempts": 100, - "ssh_timeout": "15m", - "launch_block_device_mappings": [ - { - "device_name": "/dev/sda1", - "delete_on_termination": true, - "volume_size": 100 - } - ] - }, - { - "type": "googlecompute", - "machine_type": "n1-standard-8", - "project_id": "{{user `googlecompute_project`}}", - "zone": "{{user `googlecompute_zone`}}", - "source_image_family": "{{user `product_name`}}-{{user `version`}}-windows-2016-base", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", - "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" - }, - "disk_size": "50", - "communicator": "winrm", - "winrm_username": "packer_user", - "winrm_insecure": true, - "winrm_use_ssl": true - } - ], - "provisioners": [ - { - "type": "powershell", - "scripts": [ - "windows/install-vs-2017-community.ps1" - ] - }, - { - "type": "powershell", - "inline": [ - "Install-WindowsFeature -name NET-Framework-Core" - ] - }, - { - "type": "powershell", - "scripts": [ - "windows/setup-ec2launch.ps1" - ], - "only": [ - "amazon-ebs" - ] - }, - { - "type": "powershell", - "inline": [ - "GCESysprep -NoShutdown" - ], - "only": [ - "googlecompute" - ] - }, - { - "type": "powershell", - "scripts": [ - 
"windows/cleanup.ps1" - ] - } - ] -} diff --git a/packer/windows/install-vs-2017-community.ps1 b/packer/windows/install-vs-2017-community.ps1 index c7bad41ba9..ce1f760626 100644 --- a/packer/windows/install-vs-2017-community.ps1 +++ b/packer/windows/install-vs-2017-community.ps1 @@ -19,25 +19,17 @@ $ErrorActionPreference = "Stop" write-host "Installing Visual Studio 2017 Community..." -$args = @('--add microsoft.net.component.4.targetingpack' - '--add microsoft.net.component.4.5.1.targetingpack' - '--add microsoft.visualstudio.component.debugger.justintime' - '--add microsoft.visualstudio.component.web' - '--add microsoft.visualstudio.component.vc.coreide' - '--add microsoft.visualstudio.component.vc.redist.14.latest' - '--add microsoft.visualstudio.component.graphics.win81' - '--add microsoft.visualstudio.component.vc.cmake.project' - '--add microsoft.visualstudio.component.vc.testadapterforgoogletest' - '--add microsoft.component.vc.runtime.ucrtsdk' - '--add microsoft.visualstudio.component.windows81sdk' - '--add microsoft.visualstudio.component.vc.cli.support' - '--add microsoft.visualstudio.component.windows10sdk.17134' - '--add microsoft.visualstudio.component.windows10sdk.16299.desktop' - '--add microsoft.visualstudio.component.webdeploy' - '--add microsoft.component.pythontools' - '--add component.cpython2.x64' - '--add microsoft.net.component.3.5.developertools' - '--add microsoft.visualstudio.component.typescript.3.0' +$args = @('--add microsoft.visualstudio.component.debugger.justintime', + '--add microsoft.visualstudio.component.web', + '--add microsoft.visualstudio.component.vc.coreide', + '--add microsoft.visualstudio.component.vc.redist.14.latest', + '--add microsoft.visualstudio.component.vc.tools.x86.x64', + '--add microsoft.visualstudio.component.windows10sdk.17763', + '--add microsoft.visualstudio.component.vc.testadapterforgoogletest', + '--add microsoft.component.vc.runtime.ucrtsdk', + '--add microsoft.visualstudio.component.vc.cli.support', + 
'--add microsoft.visualstudio.component.windows10sdk.16299.desktop', + '--add microsoft.visualstudio.component.webdeploy' '--quiet' ) From 5daa867e9f6ae7acf5eee06c31e2a7c97303a790 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 10 Jun 2020 20:12:18 -0700 Subject: [PATCH 006/155] Fixes Windows issues on AWS. Fixes Windows SSH on Google. --- packer/build-windows-2016-vs-2017.json | 57 +++++++++++-------- packer/windows/aws/cleanup.ps1 | 17 ++++++ packer/windows/{ => aws}/enable-winrm.ps1 | 0 packer/windows/{ => aws}/init-ssh.ps1 | 0 packer/windows/{ => aws}/setup-ec2launch.ps1 | 0 .../{install-ssh.ps1 => aws/setup-ssh.ps1} | 2 - packer/windows/cleanup.ps1 | 3 - 7 files changed, 51 insertions(+), 28 deletions(-) create mode 100644 packer/windows/aws/cleanup.ps1 rename packer/windows/{ => aws}/enable-winrm.ps1 (100%) rename packer/windows/{ => aws}/init-ssh.ps1 (100%) rename packer/windows/{ => aws}/setup-ec2launch.ps1 (100%) rename packer/windows/{install-ssh.ps1 => aws/setup-ssh.ps1} (95%) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index d9d85f49c8..3b0d3d5355 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -29,17 +29,18 @@ "product": "{{user `product_name`}}", "version": "{{user `version`}}" }, - "communicator": "ssh", - "ssh_username": "Administrator", - "ssh_handshake_attempts": 100, - "ssh_timeout": "15m", "launch_block_device_mappings": [ { "device_name": "/dev/sda1", "delete_on_termination": true, "volume_size": 100 } - ] + ], + "user_data_file": "windows/aws/enable-winrm.ps1", + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true }, { "type": "googlecompute", @@ -71,23 +72,6 @@ "windows/install-chocolatey.ps1" ] }, - { - "type": "file", - "source": "windows/init-ssh.ps1", - "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", - "only": [ - "amazon-ebs" - ] - }, - { 
- "type": "powershell", - "scripts": [ - "windows/install-ssh.ps1" - ], - "only": [ - "amazon-ebs" - ] - }, { "type": "powershell", "scripts": [ @@ -97,6 +81,7 @@ { "type": "powershell", "inline": [ + "choco install openssh -params '/SSHServerFeature' -confirm", "choco install git.install -confirm", "choco install cmake.portable -confirm", "# TODO Old CLI tests aren't compatible with Java 11", @@ -117,10 +102,27 @@ "windows/install-geode.ps1" ] }, + { + "type": "file", + "source": "windows/aws/init-ssh.ps1", + "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", + "only": [ + "amazon-ebs" + ] + }, { "type": "powershell", "scripts": [ - "windows/setup-ec2launch.ps1" + "windows/aws/setup-ssh.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/aws/setup-ec2launch.ps1" ], "only": [ "amazon-ebs" @@ -135,6 +137,15 @@ "googlecompute" ] }, + { + "type": "powershell", + "scripts": [ + "windows/aws/cleanup.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, { "type": "powershell", "scripts": [ diff --git a/packer/windows/aws/cleanup.ps1 b/packer/windows/aws/cleanup.ps1 new file mode 100644 index 0000000000..08d3a5ada7 --- /dev/null +++ b/packer/windows/aws/cleanup.ps1 @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Remove admin ssh keys +Remove-Item C:\Users\Administrator\.ssh -Recurse -Force -ErrorAction SilentlyContinue diff --git a/packer/windows/enable-winrm.ps1 b/packer/windows/aws/enable-winrm.ps1 similarity index 100% rename from packer/windows/enable-winrm.ps1 rename to packer/windows/aws/enable-winrm.ps1 diff --git a/packer/windows/init-ssh.ps1 b/packer/windows/aws/init-ssh.ps1 similarity index 100% rename from packer/windows/init-ssh.ps1 rename to packer/windows/aws/init-ssh.ps1 diff --git a/packer/windows/setup-ec2launch.ps1 b/packer/windows/aws/setup-ec2launch.ps1 similarity index 100% rename from packer/windows/setup-ec2launch.ps1 rename to packer/windows/aws/setup-ec2launch.ps1 diff --git a/packer/windows/install-ssh.ps1 b/packer/windows/aws/setup-ssh.ps1 similarity index 95% rename from packer/windows/install-ssh.ps1 rename to packer/windows/aws/setup-ssh.ps1 index bb2e4a5084..c9c7307ec3 100644 --- a/packer/windows/install-ssh.ps1 +++ b/packer/windows/aws/setup-ssh.ps1 @@ -19,8 +19,6 @@ $ErrorActionPreference = "Stop" write-host "Installing OpenSSH..." -choco install openssh -params '/SSHServerFeature' -confirm - (Get-Content -Path $Env:ProgramData\ssh\sshd_config -Raw) -replace '.*Match Group administrators.*','' -replace '.*administrators_authorized_keys.*','' | Set-Content -Path $Env:ProgramData\ssh\sshd_config schtasks.exe /Create /TN init-ssh /RU SYSTEM /SC ONSTART /TR "powershell.exe -File '${Env:ProgramData}\Amazon\EC2-Windows\Launch\Scripts\init-ssh.ps1'" diff --git a/packer/windows/cleanup.ps1 b/packer/windows/cleanup.ps1 index c6a9366eda..abe9af153c 100644 --- a/packer/windows/cleanup.ps1 +++ b/packer/windows/cleanup.ps1 @@ -13,9 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# Remove admin ssh keys -Remove-Item C:\Users\Administrator\.ssh -Recurse -Force -ErrorAction SilentlyContinue - # Cleanup temp Get-ChildItem $env:tmp -Recurse | Remove-Item -Recurse -force -ErrorAction SilentlyContinue Get-ChildItem ([environment]::GetEnvironmentVariable("temp","machine")) -Recurse| Remove-Item -Recurse -Force -ErrorAction SilentlyContinue From 44c157e52b46723d8dadec86727f063f4c1f00bd Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 11 Jun 2020 07:26:44 -0700 Subject: [PATCH 007/155] Use Administrator account consistently on Windows in AWS and Google. --- packer/build-windows-2016-vs-2017.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index 3b0d3d5355..7bf16ca5a7 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -56,10 +56,10 @@ }, "disk_size": "100", "metadata": { - "windows-startup-script-cmd": "winrm quickconfig -quiet & net user /add packer_user & net localgroup administrators packer_user /add & winrm set winrm/config/service/auth @{Basic=\"true\"}" + "windows-startup-script-cmd": "net user Administrator /active:yes & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"}" }, "communicator": "winrm", - "winrm_username": "packer_user", + "winrm_username": "Administrator", "winrm_insecure": true, "winrm_use_ssl": true } From 17acab8bb08ea945c6acdc6dcc3dfd682042c633 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 11 Jun 2020 11:31:13 -0700 Subject: [PATCH 008/155] Fixes install of .NET 3.5 via WinRM --- packer/build-windows-2016-vs-2017.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index 7bf16ca5a7..a2dacc810f 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -91,7 +91,7 @@ "choco install 
strawberryperl -confirm", "choco install nuget.commandline -confirm", "# For NUnit 2.6", - "choco install dotnet3.5 -confirm", + "choco install dogtail.dotnet3.5sp1 -confirm", "# TODO make this a nuget dependency", "choco install nunit.install --version 2.6.4 -confirm" ] From e2ff08be9ee8010193bfbca3a27b6a48146fb2ba Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 11 Jun 2020 12:47:38 -0700 Subject: [PATCH 009/155] Adds support for Windows 2019. Fixes SSH key init in AWS. --- packer/build-windows-2016-vs-2017.json | 9 -- packer/build-windows-2019-vs-2017.json | 147 ++++++++++++++++++ .../{aws/cleanup.ps1 => 2019/install-ssh.ps1} | 21 ++- packer/windows/aws/enable-winrm.ps1 | 3 + packer/windows/aws/init-ssh.ps1 | 17 +- packer/windows/aws/setup-ssh.ps1 | 2 - packer/windows/cleanup.ps1 | 4 + 7 files changed, 176 insertions(+), 27 deletions(-) create mode 100644 packer/build-windows-2019-vs-2017.json rename packer/windows/{aws/cleanup.ps1 => 2019/install-ssh.ps1} (61%) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index a2dacc810f..73b3ce3b4f 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -137,15 +137,6 @@ "googlecompute" ] }, - { - "type": "powershell", - "scripts": [ - "windows/aws/cleanup.ps1" - ], - "only": [ - "amazon-ebs" - ] - }, { "type": "powershell", "scripts": [ diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json new file mode 100644 index 0000000000..9886227f30 --- /dev/null +++ b/packer/build-windows-2019-vs-2017.json @@ -0,0 +1,147 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "image_name": "build-windows-2019-vs-2017" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "c5d.2xlarge", + "ami_virtualization_type": "hvm", + "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "region": "{{user 
`aws_region`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "Windows_Server-2019-English-Full-Base-*", + "root-device-type": "ebs" + }, + "owners": [ + "amazon" + ], + "most_recent": true + }, + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}" + }, + "launch_block_device_mappings": [ + { + "device_name": "/dev/sda1", + "delete_on_termination": true, + "volume_size": 100 + } + ], + "user_data_file": "windows/aws/enable-winrm.ps1", + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true + }, + { + "type": "googlecompute", + "machine_type": "n1-standard-2", + "project_id": "{{user `googlecompute_project`}}", + "zone": "{{user `googlecompute_zone`}}", + "source_image_family": "windows-2019", + "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_labels": { + "product": "{{user `product_name`}}", + "version": "{{user `version`}}" + }, + "disk_size": "100", + "metadata": { + "windows-startup-script-cmd": "dism /Online /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" + }, + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true + } + ], + "provisioners": [ + { + "type": "powershell", + "scripts": [ + "windows/disable-uac.ps1", + "windows/2019/install-ssh.ps1", + "windows/install-chocolatey.ps1" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-vs-2017-community.ps1" + ] + }, + { + "type": "powershell", + "inline": [ + "choco install git.install -confirm", + "choco install cmake.portable -confirm", + "# TODO Old CLI tests aren't 
compatible with Java 11", + "choco install liberica8jdk -confirm", + "choco install doxygen.install -confirm", + "choco install openssl -confirm", + "choco install strawberryperl -confirm", + "choco install nuget.commandline -confirm", + "# For NUnit 2.6", + "choco install dogtail.dotnet3.5sp1 -confirm", + "# TODO make this a nuget dependency", + "choco install nunit.install --version 2.6.4 -confirm" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-geode.ps1" + ] + }, + { + "type": "file", + "source": "windows/aws/init-ssh.ps1", + "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/aws/setup-ssh.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/aws/setup-ec2launch.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "powershell", + "inline": [ + "GCESysprep -NoShutdown" + ], + "only": [ + "googlecompute" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" + ] + } + ] +} diff --git a/packer/windows/aws/cleanup.ps1 b/packer/windows/2019/install-ssh.ps1 similarity index 61% rename from packer/windows/aws/cleanup.ps1 rename to packer/windows/2019/install-ssh.ps1 index 08d3a5ada7..b419298c2f 100644 --- a/packer/windows/aws/cleanup.ps1 +++ b/packer/windows/2019/install-ssh.ps1 @@ -4,14 +4,27 @@ # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -# Remove admin ssh keys -Remove-Item C:\Users\Administrator\.ssh -Recurse -Force -ErrorAction SilentlyContinue +# Install sshd + +$ErrorActionPreference = "Stop" + +write-host "Installing OpenSSH..." + +# Can't be installed via WinRM. Installed at startup. +#Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 + +Start-Service sshd +Set-Service -Name sshd -StartupType 'Automatic' +Get-NetFirewallRule -Name *ssh* +New-NetFirewallRule -Name sshd -DisplayName 'OpenSSH Server (sshd)' -Enabled True -Direction Inbound -Protocol TCP -Action Allow -LocalPort 22 + +write-host "Installed OpenSSH." diff --git a/packer/windows/aws/enable-winrm.ps1 b/packer/windows/aws/enable-winrm.ps1 index a694e06951..ebbcf58049 100644 --- a/packer/windows/aws/enable-winrm.ps1 +++ b/packer/windows/aws/enable-winrm.ps1 @@ -8,6 +8,9 @@ Set-ExecutionPolicy Unrestricted -Scope LocalMachine -Force -ErrorAction Ignore # Don't set this before Set-ExecutionPolicy as it throws an error $ErrorActionPreference = "stop" +# Install SSH +Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 + # Remove HTTP listener Remove-Item -Path WSMan:\Localhost\listener\listener* -Recurse diff --git a/packer/windows/aws/init-ssh.ps1 b/packer/windows/aws/init-ssh.ps1 index bff7ea12ce..e431e41de5 100644 --- a/packer/windows/aws/init-ssh.ps1 +++ b/packer/windows/aws/init-ssh.ps1 @@ -13,17 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -$ssh = "C:\Users\Administrator\.ssh" -$authorized_keys = "$ssh\authorized_keys" -if ( -not (Test-Path $authorized_keys -PathType Leaf) ) { +write-host "Installing SSH authorized key..." 
- write-host "Installing SSH authorized key" +$authorized_keys_file = "${ENV:PROGRAMDATA}\ssh\administrators_authorized_keys" +Invoke-WebRequest -Uri "http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key" -OutFile $authorized_keys_file +icacls "${authorized_keys_file}" /inheritance:r /grant "SYSTEM:(F)" /grant "BUILTIN\Administrators:(F)" - mkdir -p $ssh -ErrorAction SilentlyContinue - - Invoke-WebRequest -Uri "http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key" -OutFile $authorized_keys - - Import-Module "$Env:ProgramFiles\OpenSSH-Win64\OpenSSHUtils" -force - - Repair-AuthorizedKeyPermission $authorized_keys -Confirm:$false -} +write-host "Installed SSH authorized key" diff --git a/packer/windows/aws/setup-ssh.ps1 b/packer/windows/aws/setup-ssh.ps1 index c9c7307ec3..a4288bc95c 100644 --- a/packer/windows/aws/setup-ssh.ps1 +++ b/packer/windows/aws/setup-ssh.ps1 @@ -19,8 +19,6 @@ $ErrorActionPreference = "Stop" write-host "Installing OpenSSH..." -(Get-Content -Path $Env:ProgramData\ssh\sshd_config -Raw) -replace '.*Match Group administrators.*','' -replace '.*administrators_authorized_keys.*','' | Set-Content -Path $Env:ProgramData\ssh\sshd_config - schtasks.exe /Create /TN init-ssh /RU SYSTEM /SC ONSTART /TR "powershell.exe -File '${Env:ProgramData}\Amazon\EC2-Windows\Launch\Scripts\init-ssh.ps1'" write-host "Installed OpenSSH." diff --git a/packer/windows/cleanup.ps1 b/packer/windows/cleanup.ps1 index abe9af153c..0422e661a1 100644 --- a/packer/windows/cleanup.ps1 +++ b/packer/windows/cleanup.ps1 @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# Remove admin ssh keys +$authorized_keys_file = "${ENV:PROGRAMDATA}\ssh\administrators_authorized_keys" +Remove-Item "${authorized_keys_file}" -Force -ErrorAction SilentlyContinue + # Cleanup temp Get-ChildItem $env:tmp -Recurse | Remove-Item -Recurse -force -ErrorAction SilentlyContinue Get-ChildItem ([environment]::GetEnvironmentVariable("temp","machine")) -Recurse| Remove-Item -Recurse -Force -ErrorAction SilentlyContinue From 31301dc900665743c03f4d67d68221c5e9943f22 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 11 Jun 2020 13:22:45 -0700 Subject: [PATCH 010/155] Cleanup AWS SSH init script. --- packer/build-windows-2016-vs-2017.json | 4 +-- packer/build-windows-2019-vs-2017.json | 4 +-- packer/windows/aws/init-ssh.ps1 | 39 ++++++++++++++++++++++---- packer/windows/aws/setup-ssh.ps1 | 24 ---------------- 4 files changed, 37 insertions(+), 34 deletions(-) delete mode 100644 packer/windows/aws/setup-ssh.ps1 diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index 73b3ce3b4f..a51bb6b006 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -112,8 +112,8 @@ }, { "type": "powershell", - "scripts": [ - "windows/aws/setup-ssh.ps1" + "inline": [ + "$Env:ProgramData\\Amazon\\EC2-Windows\\Launch\\Scripts\\init-ssh.ps1 -schedule" ], "only": [ "amazon-ebs" diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json index 9886227f30..adc923bd99 100644 --- a/packer/build-windows-2019-vs-2017.json +++ b/packer/build-windows-2019-vs-2017.json @@ -112,8 +112,8 @@ }, { "type": "powershell", - "scripts": [ - "windows/aws/setup-ssh.ps1" + "inline": [ + "$Env:ProgramData\\Amazon\\EC2-Windows\\Launch\\Scripts\\init-ssh.ps1 -schedule" ], "only": [ "amazon-ebs" diff --git a/packer/windows/aws/init-ssh.ps1 b/packer/windows/aws/init-ssh.ps1 index e431e41de5..9f2a288cd3 100644 --- a/packer/windows/aws/init-ssh.ps1 +++ b/packer/windows/aws/init-ssh.ps1 @@ 
-1,4 +1,4 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more + # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 @@ -13,10 +13,37 @@ # See the License for the specific language governing permissions and # limitations under the License. -write-host "Installing SSH authorized key..." +[CmdletBinding(DefaultParameterSetName = 'Default')] +param ( + # Schedules the script to run on the next boot. + # If this argument is not provided, script is executed immediately. + [parameter(Mandatory = $false, ParameterSetName = "Schedule")] + [switch] $Schedule = $false +) -$authorized_keys_file = "${ENV:PROGRAMDATA}\ssh\administrators_authorized_keys" -Invoke-WebRequest -Uri "http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key" -OutFile $authorized_keys_file -icacls "${authorized_keys_file}" /inheritance:r /grant "SYSTEM:(F)" /grant "BUILTIN\Administrators:(F)" +Set-Variable modulePath -Option Constant -Scope Local -Value (Join-Path $env:ProgramData -ChildPath "Amazon\EC2-Windows\Launch\Module\Ec2Launch.psd1") +Set-Variable scriptPath -Option Constant -Scope Local -Value (Join-Path $PSScriptRoot -ChildPath $MyInvocation.MyCommand.Name) +Set-Variable authorizedKeysPath -Option Constant -Scope Local -Value (Join-Path $env:ProgramData -ChildPath "ssh\administrators_authorized_keys") -write-host "Installed SSH authorized key" +Import-Module $modulePath + +Initialize-Log -Filename "Ec2Launch.log" -AllowLogToConsole + +if ($Schedule) { + Write-Log "Sheduling SSH Authorized Keys Initialization..." + Register-ScriptScheduler -ScriptPath $scriptPath -ScheduleName "SSH Authorized Keys Initialization" + Write-Log "Sheduled SSH Authorized Keys Initialization." 
+ + Complete-Log + Exit 0 +} + +Write-Log "Initializating SSH Authorized Keys..." + +Invoke-WebRequest -Uri "http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key" -OutFile $authorizedKeysPath +icacls $authorizedKeysPath /inheritance:r /grant "SYSTEM:(F)" /grant "BUILTIN\Administrators:(F)" + +Write-Log "Initializated SSH Authorized Keys." + +Complete-Log +Exit 0 diff --git a/packer/windows/aws/setup-ssh.ps1 b/packer/windows/aws/setup-ssh.ps1 deleted file mode 100644 index a4288bc95c..0000000000 --- a/packer/windows/aws/setup-ssh.ps1 +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Install sshd - -$ErrorActionPreference = "Stop" - -write-host "Installing OpenSSH..." - -schtasks.exe /Create /TN init-ssh /RU SYSTEM /SC ONSTART /TR "powershell.exe -File '${Env:ProgramData}\Amazon\EC2-Windows\Launch\Scripts\init-ssh.ps1'" - -write-host "Installed OpenSSH." From 2ff94827f7eb87f821db627932c7483d31290c72 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 11 Jun 2020 13:47:19 -0700 Subject: [PATCH 011/155] Install .NET 3.5 via startup. 
--- packer/build-windows-2016-vs-2017.json | 6 +-- packer/build-windows-2019-vs-2017.json | 6 +-- .../enable-winrm.ps1 => 2016/aws/startup.ps1} | 5 ++- packer/windows/2019/aws/startup.ps1 | 44 +++++++++++++++++++ 4 files changed, 51 insertions(+), 10 deletions(-) rename packer/windows/{aws/enable-winrm.ps1 => 2016/aws/startup.ps1} (93%) create mode 100644 packer/windows/2019/aws/startup.ps1 diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index a51bb6b006..ca3a8eaffc 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -36,7 +36,7 @@ "volume_size": 100 } ], - "user_data_file": "windows/aws/enable-winrm.ps1", + "user_data_file": "windows/2016/aws/startup.ps1", "communicator": "winrm", "winrm_username": "Administrator", "winrm_insecure": true, @@ -56,7 +56,7 @@ }, "disk_size": "100", "metadata": { - "windows-startup-script-cmd": "net user Administrator /active:yes & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"}" + "windows-startup-script-cmd": "dism /Online /NoRestart /Enable-Feature /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" }, "communicator": "winrm", "winrm_username": "Administrator", @@ -90,8 +90,6 @@ "choco install openssl -confirm", "choco install strawberryperl -confirm", "choco install nuget.commandline -confirm", - "# For NUnit 2.6", - "choco install dogtail.dotnet3.5sp1 -confirm", "# TODO make this a nuget dependency", "choco install nunit.install --version 2.6.4 -confirm" ] diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json index adc923bd99..5843447257 100644 --- a/packer/build-windows-2019-vs-2017.json +++ b/packer/build-windows-2019-vs-2017.json @@ -36,7 +36,7 @@ "volume_size": 100 } ], - "user_data_file": "windows/aws/enable-winrm.ps1", + "user_data_file": "windows/2019/aws/startup.ps1", 
"communicator": "winrm", "winrm_username": "Administrator", "winrm_insecure": true, @@ -56,7 +56,7 @@ }, "disk_size": "100", "metadata": { - "windows-startup-script-cmd": "dism /Online /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" + "windows-startup-script-cmd": "dism /Online /NoRestart /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & dism /Online /NoRestart /Enable-Feature /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" }, "communicator": "winrm", "winrm_username": "Administrator", @@ -90,8 +90,6 @@ "choco install openssl -confirm", "choco install strawberryperl -confirm", "choco install nuget.commandline -confirm", - "# For NUnit 2.6", - "choco install dogtail.dotnet3.5sp1 -confirm", "# TODO make this a nuget dependency", "choco install nunit.install --version 2.6.4 -confirm" ] diff --git a/packer/windows/aws/enable-winrm.ps1 b/packer/windows/2016/aws/startup.ps1 similarity index 93% rename from packer/windows/aws/enable-winrm.ps1 rename to packer/windows/2016/aws/startup.ps1 index ebbcf58049..68755b06e3 100644 --- a/packer/windows/aws/enable-winrm.ps1 +++ b/packer/windows/2016/aws/startup.ps1 @@ -8,8 +8,9 @@ Set-ExecutionPolicy Unrestricted -Scope LocalMachine -Force -ErrorAction Ignore # Don't set this before Set-ExecutionPolicy as it throws an error $ErrorActionPreference = "stop" -# Install SSH -Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 +# Install .NET 3.5 for NUnit 2.6 +# Can't be installed over WinRM/SSH +Add-WindowsFeature -Name NET-Framework-Core # Remove HTTP listener Remove-Item -Path WSMan:\Localhost\listener\listener* -Recurse diff --git a/packer/windows/2019/aws/startup.ps1 b/packer/windows/2019/aws/startup.ps1 new file mode 100644 index 0000000000..bfeb680cff --- /dev/null +++ 
b/packer/windows/2019/aws/startup.ps1 @@ -0,0 +1,44 @@ + + +write-output "Running User Data Script" +write-host "(host) Running User Data Script" + +Set-ExecutionPolicy Unrestricted -Scope LocalMachine -Force -ErrorAction Ignore + +# Don't set this before Set-ExecutionPolicy as it throws an error +$ErrorActionPreference = "stop" + +# Install SSH +# Can't be installed over WinRM/SSH +Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 + +# Install .NET 3.5 for NUnit 2.6 +# Can't be installed over WinRM/SSH +Add-WindowsFeature -Name NET-Framework-Core + +# Remove HTTP listener +Remove-Item -Path WSMan:\Localhost\listener\listener* -Recurse + +$Cert = New-SelfSignedCertificate -CertstoreLocation Cert:\LocalMachine\My -DnsName "packer" +New-Item -Path WSMan:\LocalHost\Listener -Transport HTTPS -Address * -CertificateThumbPrint $Cert.Thumbprint -Force + +# WinRM +write-output "Setting up WinRM" +write-host "(host) setting up WinRM" + +cmd.exe /c winrm quickconfig -q +cmd.exe /c winrm set "winrm/config" '@{MaxTimeoutms="1800000"}' +cmd.exe /c winrm set "winrm/config/winrs" '@{MaxMemoryPerShellMB="8192"}' +cmd.exe /c winrm set "winrm/config/service" '@{AllowUnencrypted="true"}' +cmd.exe /c winrm set "winrm/config/client" '@{AllowUnencrypted="true"}' +cmd.exe /c winrm set "winrm/config/service/auth" '@{Basic="true"}' +cmd.exe /c winrm set "winrm/config/client/auth" '@{Basic="true"}' +cmd.exe /c winrm set "winrm/config/service/auth" '@{CredSSP="true"}' +cmd.exe /c winrm set "winrm/config/listener?Address=*+Transport=HTTPS" "@{Port=`"5986`";Hostname=`"packer`";CertificateThumbprint=`"$($Cert.Thumbprint)`"}" +cmd.exe /c netsh advfirewall firewall set rule group="remote administration" new enable=yes +cmd.exe /c netsh firewall add portopening TCP 5986 "Port 5986" +cmd.exe /c net stop winrm +cmd.exe /c sc config winrm start= auto +cmd.exe /c net start winrm + + \ No newline at end of file From d317044c2da17203800ef533296aeec128ca6ef8 Mon Sep 17 00:00:00 2001 From: 
Jacob Barrett Date: Sat, 25 Apr 2020 19:55:57 -0700 Subject: [PATCH 012/155] New Dockerfiles for RHEL7 and Windows --- docker/rhel7/Dockerfile | 48 ++++++++++++++++++++++++++++++++++++ docker/windows/Dockerfile | 52 +++++++++++++++++++++++++++++++++++++++ docker/windows/README.md | 21 ++++++++++++++++ 3 files changed, 121 insertions(+) create mode 100644 docker/rhel7/Dockerfile create mode 100644 docker/windows/Dockerfile create mode 100644 docker/windows/README.md diff --git a/docker/rhel7/Dockerfile b/docker/rhel7/Dockerfile new file mode 100644 index 0000000000..1e62d648dd --- /dev/null +++ b/docker/rhel7/Dockerfile @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM registry.access.redhat.com/rhscl/devtoolset-4-toolchain-rhel7:latest +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +COPY --from=registry.access.redhat.com/rhscl/s2i-core-rhel7:latest /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo + +RUN yum-config-manager --enable ubi-server-rhscl-7-rpms && \ + yum install -y --setopt=tsflags=nodocs \ + java-1.8.0-openjdk-devel \ + python27-python-pip \ + make \ + doxygen \ + zlib-devel \ + patch \ + openssl-devel \ + git \ + http://mirror.centos.org/centos/7/os/x86_64/Packages/doxygen-1.8.5-3.el7.x86_64.rpm && \ + yum -y clean all --enablerepo='*' + +ENV CMAKE_VERSION 3.15.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ENV GEODE_VERSION 1.12.0 +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar xvz +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +ENV PATH $PATH:$GEODE_HOME/bin + +CMD ["bash"] diff --git a/docker/windows/Dockerfile b/docker/windows/Dockerfile new file mode 100644 index 0000000000..e021a395d8 --- /dev/null +++ b/docker/windows/Dockerfile @@ -0,0 +1,52 @@ +# escape=` + +# .NET 3.5 is required for NUnit 2.6 only. 
+ARG FROM_IMAGE=mcr.microsoft.com/dotnet/framework/runtime:3.5 +FROM ${FROM_IMAGE} + +SHELL ["cmd", "/S", "/C"] + +WORKDIR C:\ + +RUN powershell -Command ` + iex ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1')); + +RUN choco install ` + git.install ` + liberica8jdk ` + cmake.portable ` + doxygen.install ` + openssl ` + strawberryperl ` + nuget.commandline -confirm + +# Only for NUnit 2.6 +RUN choco install nunit.install --version 2.6.4 -confirm + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME C:\apache-geode-${GEODE_VERSION} +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/%GEODE_VERSION%/apache-geode-%GEODE_VERSION%.tgz" | tar -zxvf - --exclude javadoc + +ADD https://raw.githubusercontent.com/microsoft/vs-dockerfiles/master/managed-native-desktop/Install.cmd C:\TEMP\Install.cmd +ADD https://aka.ms/vscollect.exe C:\TEMP\collect.exe + +# Download channel for fixed install. +ARG CHANNEL_URL=https://aka.ms/vs/15/release/channel +ADD ${CHANNEL_URL} C:\TEMP\VisualStudio.chman + +# Download and install Build Tools for Visual Studio 2017. 
+ADD https://aka.ms/vs/15/release/vs_buildtools.exe C:\TEMP\vs_buildtools.exe +RUN C:\TEMP\Install.cmd C:\TEMP\vs_buildtools.exe --quiet --wait --norestart --nocache ` + --channelUri C:\TEMP\VisualStudio.chman ` + --installChannelUri C:\TEMP\VisualStudio.chman ` + --add Microsoft.VisualStudio.Workload.ManagedDesktopBuildTools ` + --add Microsoft.Net.Component.4.5.2.TargetingPack ` + --add Microsoft.VisualStudio.Component.TestTools.BuildTools ` + --add Microsoft.VisualStudio.Workload.VCTools ` + --add Microsoft.VisualStudio.Component.VC.CLI.Support ` + --add Microsoft.VisualStudio.Component.VC.Tools.14.15 ` + --add Microsoft.VisualStudio.Component.Windows10SDK.16299.Desktop ` + --installPath C:\BuildTools + +ENTRYPOINT C:\BuildTools\VC\Auxiliary\Build\vcvarsall.bat x86_amd64 10.0.16299.0 -vcvars_ver=14.15 && +CMD cmd diff --git a/docker/windows/README.md b/docker/windows/README.md new file mode 100644 index 0000000000..d91152740c --- /dev/null +++ b/docker/windows/README.md @@ -0,0 +1,21 @@ +# C++ Toolkit Version +Currently we target VC toolset 14.15 which is forward compatible to 14.15+. +* Build Tools Component: `Microsoft.VisualStudio.Component.VC.Tools.14.15` +* Entrypoint: `vcvarsall.bat ... -vcvars_ver=14.15` + +We should investigate if we need to go back a few more minors to remain compatible +with previous releases. + +# C++ SDK Version +We haven't really figured out if or how to lock down the SDK version. Currently we use +version 10.16299. +* Build Tools Component: `Microsoft.VisualStudio.Component.Windows10SDK.16299.Desktop` +* Entrypoint: `vcvarsall.bat ... 10.0.16299.0 ...` + +ACE library seems to be stuck on 10.16299. If we upgrade in the future we may need to +unstick ACE to avoid mixing of SDKs. It is unclear is mixing SDKs is an issue. + +# .NET Framework +We build against version 4.5.2 but require 3.5 for old NUnit 2.6.4 at runtime. 
+* Base Image: `mcr.microsoft.com/dotnet/framework/runtime:3.5` +* Build Tools Component: `Microsoft.Net.Component.4.5.2.TargetingPack` From 9f508e298d9fe2c6a0b1779636bb60a15486345a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 18 Jun 2020 10:10:24 -0700 Subject: [PATCH 013/155] New Dockerfiles for RHEL8 Ubuntu 16.04 Ubuntu 18.04 CentOS-7 CentOS-8 --- docker/centos-7/Dockerfile | 53 ++++++++++++++++++++++ docker/centos-7/bellsoft.repo | 7 +++ docker/centos-8/Dockerfile | 55 +++++++++++++++++++++++ docker/centos-8/bellsoft.repo | 7 +++ docker/{rhel7 => rhel-7}/Dockerfile | 28 +++++++----- docker/rhel-7/bellsoft.repo | 7 +++ docker/rhel-8/Dockerfile | 56 ++++++++++++++++++++++++ docker/rhel-8/bellsoft.repo | 7 +++ docker/ubuntu-16.04/Dockerfile | 68 +++++++++++++++++++++++++++++ docker/ubuntu-18.04/Dockerfile | 68 +++++++++++++++++++++++++++++ 10 files changed, 346 insertions(+), 10 deletions(-) create mode 100644 docker/centos-7/Dockerfile create mode 100644 docker/centos-7/bellsoft.repo create mode 100644 docker/centos-8/Dockerfile create mode 100644 docker/centos-8/bellsoft.repo rename docker/{rhel7 => rhel-7}/Dockerfile (78%) create mode 100644 docker/rhel-7/bellsoft.repo create mode 100644 docker/rhel-8/Dockerfile create mode 100644 docker/rhel-8/bellsoft.repo create mode 100644 docker/ubuntu-16.04/Dockerfile create mode 100644 docker/ubuntu-18.04/Dockerfile diff --git a/docker/centos-7/Dockerfile b/docker/centos-7/Dockerfile new file mode 100644 index 0000000000..22ae8abf32 --- /dev/null +++ b/docker/centos-7/Dockerfile @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM centos/devtoolset-4-toolchain-centos7:latest +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +RUN yum update -y && \ + yum -y clean all + +COPY bellsoft.repo /etc/yum.repos.d/ + +RUN yum update -y && \ + yum install -y \ + git \ + make \ + zlib-devel \ + patch \ + openssl-devel \ + bellsoft-java11 \ + doxygen \ + python3-pip \ + which && \ + yum -y clean all + +RUN pip3 install --upgrade pip && \ + pip3 install cpp-coveralls + +ARG CMAKE_VERSION=3.16.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc + +CMD ["bash"] diff --git a/docker/centos-7/bellsoft.repo b/docker/centos-7/bellsoft.repo new file mode 100644 index 0000000000..bd081070b3 --- /dev/null +++ b/docker/centos-7/bellsoft.repo @@ -0,0 +1,7 @@ +[BellSoft] +name=BellSoft Repository +baseurl=https://yum.bell-sw.com +enabled=1 +gpgcheck=1 +gpgkey=https://download.bell-sw.com/pki/GPG-KEY-bellsoft +priority=1 diff --git a/docker/centos-8/Dockerfile b/docker/centos-8/Dockerfile new file mode 100644 index 0000000000..2d93bf3c07 --- /dev/null +++ b/docker/centos-8/Dockerfile @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM centos:8 +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +RUN yum update -y && \ + yum -y clean all + +COPY bellsoft.repo /etc/yum.repos.d/ + +RUN yum install -y 'dnf-command(config-manager)' && \ + yum config-manager --set-enabled PowerTools && \ + yum update -y && \ + yum install -y --setopt=tsflags=nodocs \ + git \ + make \ + zlib-devel \ + patch \ + openssl-devel \ + bellsoft-java11 \ + doxygen \ + python3-pip \ + gcc-c++ && \ + yum -y clean all + +RUN pip3 install --upgrade pip && \ + pip3 install cpp-coveralls + +ARG CMAKE_VERSION=3.16.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc + +CMD ["bash"] diff --git a/docker/centos-8/bellsoft.repo b/docker/centos-8/bellsoft.repo new file mode 100644 index 0000000000..bd081070b3 --- /dev/null +++ b/docker/centos-8/bellsoft.repo @@ -0,0 +1,7 @@ +[BellSoft] +name=BellSoft Repository +baseurl=https://yum.bell-sw.com +enabled=1 +gpgcheck=1 
+gpgkey=https://download.bell-sw.com/pki/GPG-KEY-bellsoft +priority=1 diff --git a/docker/rhel7/Dockerfile b/docker/rhel-7/Dockerfile similarity index 78% rename from docker/rhel7/Dockerfile rename to docker/rhel-7/Dockerfile index 1e62d648dd..6c89c3b22e 100644 --- a/docker/rhel7/Dockerfile +++ b/docker/rhel-7/Dockerfile @@ -23,26 +23,34 @@ WORKDIR / COPY --from=registry.access.redhat.com/rhscl/s2i-core-rhel7:latest /etc/yum.repos.d/ubi.repo /etc/yum.repos.d/ubi.repo RUN yum-config-manager --enable ubi-server-rhscl-7-rpms && \ + yum update -y && \ + yum -y clean all + +COPY bellsoft.repo /etc/yum.repos.d/ + +RUN yum-config-manager --enable ubi-server-rhscl-7-rpms && \ + yum update -y && \ yum install -y --setopt=tsflags=nodocs \ - java-1.8.0-openjdk-devel \ - python27-python-pip \ + git \ make \ - doxygen \ zlib-devel \ patch \ openssl-devel \ - git \ - http://mirror.centos.org/centos/7/os/x86_64/Packages/doxygen-1.8.5-3.el7.x86_64.rpm && \ - yum -y clean all --enablerepo='*' + rh-python36-python-pip \ + bellsoft-java11 \ + http://mirror.centos.org/centos/7/os/x86_64/Packages/doxygen-1.8.5-4.el7.x86_64.rpm && \ + yum -y clean all + +RUN scl enable rh-python36 "pip3 install --upgrade pip && \ + pip3 install cpp-coveralls" -ENV CMAKE_VERSION 3.15.0 +ARG CMAKE_VERSION=3.16.0 RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ bash /tmp/cmake --skip-license --prefix=/usr/local && \ rm /tmp/cmake -ENV GEODE_VERSION 1.12.0 -RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar xvz +ARG GEODE_VERSION=1.12.0 ENV GEODE_HOME /apache-geode-${GEODE_VERSION} -ENV PATH $PATH:$GEODE_HOME/bin +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc CMD ["bash"] diff --git a/docker/rhel-7/bellsoft.repo 
b/docker/rhel-7/bellsoft.repo new file mode 100644 index 0000000000..bd081070b3 --- /dev/null +++ b/docker/rhel-7/bellsoft.repo @@ -0,0 +1,7 @@ +[BellSoft] +name=BellSoft Repository +baseurl=https://yum.bell-sw.com +enabled=1 +gpgcheck=1 +gpgkey=https://download.bell-sw.com/pki/GPG-KEY-bellsoft +priority=1 diff --git a/docker/rhel-8/Dockerfile b/docker/rhel-8/Dockerfile new file mode 100644 index 0000000000..793dd556ba --- /dev/null +++ b/docker/rhel-8/Dockerfile @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM registry.access.redhat.com/ubi8/ubi:latest +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +RUN yum update -y && \ + yum -y clean all + +COPY bellsoft.repo /etc/yum.repos.d/ +COPY --from=centos:8 /etc/yum.repos.d/CentOS-PowerTools.repo /etc/yum.repos.d/ +COPY --from=centos:8 /etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial /etc/pki/rpm-gpg/ + +RUN yum config-manager --set-enabled PowerTools && \ + yum update -y && \ + yum install -y --setopt=tsflags=nodocs \ + git \ + make \ + zlib-devel \ + patch \ + openssl-devel \ + bellsoft-java11 \ + doxygen \ + python3-pip \ + gcc-c++ && \ + yum -y clean all + +RUN pip3 install --upgrade pip && \ + pip3 install cpp-coveralls + +ARG CMAKE_VERSION=3.16.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc + +CMD ["bash"] diff --git a/docker/rhel-8/bellsoft.repo b/docker/rhel-8/bellsoft.repo new file mode 100644 index 0000000000..bd081070b3 --- /dev/null +++ b/docker/rhel-8/bellsoft.repo @@ -0,0 +1,7 @@ +[BellSoft] +name=BellSoft Repository +baseurl=https://yum.bell-sw.com +enabled=1 +gpgcheck=1 +gpgkey=https://download.bell-sw.com/pki/GPG-KEY-bellsoft +priority=1 diff --git a/docker/ubuntu-16.04/Dockerfile b/docker/ubuntu-16.04/Dockerfile new file mode 100644 index 0000000000..59ba0f9bdc --- /dev/null +++ b/docker/ubuntu-16.04/Dockerfile @@ -0,0 +1,68 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:16.04 +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +ENV DEBIAN_FRONTEND noninteractive +RUN apt update && \ + apt -yq full-upgrade && \ + apt-get -y install \ + apt-transport-https \ + ca-certificates \ + curl \ + gnupg2 \ + software-properties-common && \ + apt-get -y autoremove && \ + apt-get autoclean + +RUN . /etc/os-release && \ + curl -s https://download.bell-sw.com/pki/GPG-KEY-bellsoft | apt-key add - && \ + apt-add-repository "deb http://apt.bell-sw.com/ stable main" && \ + curl -s https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - && \ + apt-add-repository "deb http://apt.llvm.org/${VERSION_CODENAME}/ llvm-toolchain-${VERSION_CODENAME}-6.0 main" + +RUN apt update && apt -yq full-upgrade && apt-get -y install \ + build-essential \ + libc++-dev \ + libc++abi-dev \ + zlib1g-dev \ + libssl-dev \ + bellsoft-java11 \ + git \ + doxygen \ + graphviz \ + python3-pip \ + clang-format-6.0 && \ + apt-get -y autoremove && \ + apt-get autoclean + +RUN pip3 install --upgrade pip && \ + pip3 install cpp-coveralls + +ARG CMAKE_VERSION=3.16.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +RUN curl -L -s 
"https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc + +CMD ["bash"] diff --git a/docker/ubuntu-18.04/Dockerfile b/docker/ubuntu-18.04/Dockerfile new file mode 100644 index 0000000000..1e8d69e1cc --- /dev/null +++ b/docker/ubuntu-18.04/Dockerfile @@ -0,0 +1,68 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:18.04 +LABEL maintainer Apache Geode + +USER root +WORKDIR / + +ENV DEBIAN_FRONTEND noninteractive +RUN apt update && \ + apt -yq full-upgrade && \ + apt-get -y install \ + apt-transport-https \ + ca-certificates \ + curl \ + gnupg2 \ + software-properties-common && \ + apt-get -y autoremove && \ + apt-get autoclean + +RUN . 
/etc/os-release && \ + curl -s https://download.bell-sw.com/pki/GPG-KEY-bellsoft | apt-key add - && \ + apt-add-repository "deb http://apt.bell-sw.com/ stable main" && \ + curl -s https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - && \ + apt-add-repository "deb http://apt.llvm.org/${VERSION_CODENAME}/ llvm-toolchain-${VERSION_CODENAME}-6.0 main" + +RUN apt update && apt -yq full-upgrade && apt-get -y install \ + build-essential \ + libc++-dev \ + libc++abi-dev \ + zlib1g-dev \ + libssl-dev \ + bellsoft-java11 \ + git \ + doxygen \ + graphviz \ + python3-pip \ + clang-format-6.0 && \ + apt-get -y autoremove && \ + apt-get autoclean + +RUN pip3 install --upgrade pip && \ + pip3 install cpp-coveralls + +ARG CMAKE_VERSION=3.16.0 +RUN curl -o /tmp/cmake -s "https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh" && \ + bash /tmp/cmake --skip-license --prefix=/usr/local && \ + rm /tmp/cmake + +ARG GEODE_VERSION=1.12.0 +ENV GEODE_HOME /apache-geode-${GEODE_VERSION} +RUN curl -L -s "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | tar -zxvf - --exclude javadoc + +CMD ["bash"] From 37c5b775ceac1b16a161da1b435a706aeebdd838 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 2 Jul 2020 20:24:43 -0700 Subject: [PATCH 014/155] Fixes .NET test execution --- clicache/integration-test/test.bat.in | 9 +-------- clicache/integration-test2/CMakeLists.txt | 1 + clicache/test/CMakeLists.txt | 1 + clicache/test2/CMakeLists.txt | 1 + tests/cli/FwkClient/ClientProcess.cs | 6 ++++-- 5 files changed, 8 insertions(+), 10 deletions(-) diff --git a/clicache/integration-test/test.bat.in b/clicache/integration-test/test.bat.in index 1d7949d1ef..31fbe33a37 100644 --- a/clicache/integration-test/test.bat.in +++ b/clicache/integration-test/test.bat.in @@ -31,17 +31,10 @@ set GEODE_NATIVE_HOME="%TEST_DIR%" set TESTSRC=$ set GF_JAVA=$ -set GFJAVA=$ set GFE_LOGLEVEL=config set 
GFE_SECLOGLEVEL=config set GFE_DIR=$ -set MCAST_ADDR=224.10.13.63 -set MCAST_PORT=${PORT} -set TIMEBOMB=3600 set GF_CLASSPATH=%GF_CLASSPATH%;$\tests\javaobject\javaobject.jar -set PROFILERCMD= -set BUG481= -set TESTNAME=${TEST} rmdir /q /s "%TEST_DIR%" 2>nul mkdir "%TEST_DIR%" @@ -49,7 +42,7 @@ if %errorlevel% neq 0 exit /b %errorlevel% pushd "%TEST_DIR%" if %errorlevel% neq 0 exit /b %errorlevel% -${NUNIT_CONSOLE} /labels /run:${NAMESPACE}.${TESTCLASS} ..\..\$\UnitTests.dll +${NUNIT_CONSOLE} /timeout 1000000 /labels /run:${NAMESPACE}.${TESTCLASS} ..\..\$\UnitTests.dll if %errorlevel% neq 0 exit /b %errorlevel% popd diff --git a/clicache/integration-test2/CMakeLists.txt b/clicache/integration-test2/CMakeLists.txt index 725ebde939..d387de9ee5 100644 --- a/clicache/integration-test2/CMakeLists.txt +++ b/clicache/integration-test2/CMakeLists.txt @@ -72,6 +72,7 @@ set_target_properties( ${PROJECT_NAME} PROPERTIES COMMON_LANGUAGE_RUNTIME "" VS_GLOBAL_ROOTNAMESPACE ${PROJECT_NAME} VS_GLOBAL_TreatWarningsAsErrors True + VS_GLOBAL_IsTestProject True VS_GLOBAL_TestProjectType UnitTest VS_GLOBAL_PROJECT_TYPES "{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}" VS_DOTNET_REFERENCES "System;System.Xml;System.Web;System.Configuration" diff --git a/clicache/test/CMakeLists.txt b/clicache/test/CMakeLists.txt index acb0229b70..f53a087047 100644 --- a/clicache/test/CMakeLists.txt +++ b/clicache/test/CMakeLists.txt @@ -36,6 +36,7 @@ set_target_properties(Apache.Geode.Test PROPERTIES COMMON_LANGUAGE_RUNTIME "" VS_GLOBAL_CLRSupport "true" VS_GLOBAL_KEYWORD "ManagedCProj" + VS_GLOBAL_IsTestProject True VS_GLOBAL_TestProjectType UnitTest VS_GLOBAL_PROJECT_TYPES "{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}" VS_GLOBAL_ROOTNAMESPACE Apache.Geode.Test diff --git a/clicache/test2/CMakeLists.txt b/clicache/test2/CMakeLists.txt index cd022c663b..e3d2ae2b6b 100644 --- a/clicache/test2/CMakeLists.txt +++ 
b/clicache/test2/CMakeLists.txt @@ -40,6 +40,7 @@ set_target_properties( ${PROJECT_NAME} PROPERTIES COMMON_LANGUAGE_RUNTIME "" VS_GLOBAL_ROOTNAMESPACE ${PROJECT_NAME} VS_GLOBAL_TreatWarningsAsErrors True + VS_GLOBAL_IsTestProject True VS_GLOBAL_TestProjectType UnitTest VS_GLOBAL_PROJECT_TYPES "{3AC096D0-A1C2-E12C-1390-A8335801FDAB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}" VS_DOTNET_REFERENCES "System;System.Xml;System.Web;System.Configuration" diff --git a/tests/cli/FwkClient/ClientProcess.cs b/tests/cli/FwkClient/ClientProcess.cs index e573c54b55..8e3dfc29b6 100644 --- a/tests/cli/FwkClient/ClientProcess.cs +++ b/tests/cli/FwkClient/ClientProcess.cs @@ -32,7 +32,7 @@ class ClientProcess public static IChannel clientChannel = null; public static string bbUrl; public static string logFile = null; - static void Main(string[] args) + static int Main(string[] args) { string myId = "0"; try @@ -78,7 +78,9 @@ static void Main(string[] args) { Util.Log("FATAL: Client {0}, Exception caught: {1}", myId, ex); } - System.Threading.Thread.Sleep(System.Threading.Timeout.Infinite); + System.Threading.Thread.Sleep(TimeSpan.FromMinutes(10)); + Util.Log("FATAL: Client {0}, Terminating after timeout.", myId); + return -1; } private static void ShowUsage(string[] args) From 8682649fd53a72c0580b0d3ce4fd1410ab18f5b7 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 2 Jul 2020 20:27:11 -0700 Subject: [PATCH 015/155] Makes SSH common on AWS and GCP --- packer/build-windows-2016-vs-2017.json | 15 ++++-- packer/build-windows-2019-vs-2017.json | 17 ++++--- packer/windows/aws/init-ssh.ps1 | 8 ++-- packer/windows/google/init-ssh.ps1 | 64 ++++++++++++++++++++++++++ 4 files changed, 89 insertions(+), 15 deletions(-) create mode 100644 packer/windows/google/init-ssh.ps1 diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index ca3a8eaffc..e5c69539cd 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ 
-103,18 +103,23 @@ { "type": "file", "source": "windows/aws/init-ssh.ps1", - "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", "only": [ "amazon-ebs" ] }, + { + "type": "file", + "source": "windows/google/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", + "only": [ + "googlecompute" + ] + }, { "type": "powershell", "inline": [ - "$Env:ProgramData\\Amazon\\EC2-Windows\\Launch\\Scripts\\init-ssh.ps1 -schedule" - ], - "only": [ - "amazon-ebs" + ". $Env:ProgramData\\ssh\\init-ssh.ps1 -schedule" ] }, { diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json index 5843447257..ded4742aa1 100644 --- a/packer/build-windows-2019-vs-2017.json +++ b/packer/build-windows-2019-vs-2017.json @@ -56,7 +56,7 @@ }, "disk_size": "100", "metadata": { - "windows-startup-script-cmd": "dism /Online /NoRestart /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & dism /Online /NoRestart /Enable-Feature /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" + "windows-startup-script-cmd": "dism /Online /NoRestart /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & dism /Online /NoRestart /Enable-Feature /All /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" }, "communicator": "winrm", "winrm_username": "Administrator", @@ -103,18 +103,23 @@ { "type": "file", "source": "windows/aws/init-ssh.ps1", - "destination": "$Env:ProgramData/Amazon/EC2-Windows/Launch/Scripts/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", "only": [ "amazon-ebs" ] }, + { + "type": "file", + "source": "windows/google/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", + "only": [ + "googlecompute" + ] + }, { "type": "powershell", "inline": [ - 
"$Env:ProgramData\\Amazon\\EC2-Windows\\Launch\\Scripts\\init-ssh.ps1 -schedule" - ], - "only": [ - "amazon-ebs" + ". $Env:ProgramData\\ssh\\init-ssh.ps1 -schedule" ] }, { diff --git a/packer/windows/aws/init-ssh.ps1 b/packer/windows/aws/init-ssh.ps1 index 9f2a288cd3..651c98b2e5 100644 --- a/packer/windows/aws/init-ssh.ps1 +++ b/packer/windows/aws/init-ssh.ps1 @@ -30,20 +30,20 @@ Import-Module $modulePath Initialize-Log -Filename "Ec2Launch.log" -AllowLogToConsole if ($Schedule) { - Write-Log "Sheduling SSH Authorized Keys Initialization..." + Write-Log "Scheduling SSH Authorized Keys Initialization..." Register-ScriptScheduler -ScriptPath $scriptPath -ScheduleName "SSH Authorized Keys Initialization" - Write-Log "Sheduled SSH Authorized Keys Initialization." + Write-Log "Scheduling SSH Authorized Keys Initialization." Complete-Log Exit 0 } -Write-Log "Initializating SSH Authorized Keys..." +Write-Log "Initializing SSH Authorized Keys..." Invoke-WebRequest -Uri "http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key" -OutFile $authorizedKeysPath icacls $authorizedKeysPath /inheritance:r /grant "SYSTEM:(F)" /grant "BUILTIN\Administrators:(F)" -Write-Log "Initializated SSH Authorized Keys." +Write-Log "Initialized SSH Authorized Keys." Complete-Log Exit 0 diff --git a/packer/windows/google/init-ssh.ps1 b/packer/windows/google/init-ssh.ps1 new file mode 100644 index 0000000000..4163584941 --- /dev/null +++ b/packer/windows/google/init-ssh.ps1 @@ -0,0 +1,64 @@ + # Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[CmdletBinding(DefaultParameterSetName = 'Default')] +param ( + # Schedules the script to run on the next boot. + # If this argument is not provided, script is executed immediately. + [parameter(Mandatory = $false, ParameterSetName = "Schedule")] + [switch] $Schedule = $false +) + +Set-Variable log -Option Constant -Scope Local -Value c:\ssh-init.log +Set-Variable scriptPath -Option Constant -Scope Local -Value (Join-Path $PSScriptRoot -ChildPath $MyInvocation.MyCommand.Name) +Set-Variable authorizedKeysPath -Option Constant -Scope Local -Value (Join-Path $env:ProgramData -ChildPath "ssh\administrators_authorized_keys") +Set-Variable sskKeysPath -Option Constant -Scope Local -Value (Join-Path $env:ProgramData -ChildPath "ssh\ssh-keys") + +function Write-Log { + Write-Host "$args" + Add-Content -Path $log -Value "$args" +} + +if ($Schedule) { + Write-Log "Scheduling SSH Authorized Keys Initialization..." + schtasks.exe /Create /F /TN "SSH Authorized Keys Initialization" /RU SYSTEM /SC ONSTART /TR "powershell.exe -File '$scriptPath'" + Write-Log "Scheduled SSH Authorized Keys Initialization." + Exit 0 +} + +Write-Log "Initializing SSH Authorized Keys..." 
+ +Invoke-WebRequest -Headers @{'Metadata-Flavor'='Google'} -Uri 'http://metadata.google.internal/computeMetadata/v1/instance/attributes/ssh-keys' -OutFile "$sskKeysPath" +Get-Content -Path "$sskKeysPath" -OutVariable sshKeys + +Write-Log "Got ssh-keys: $sshKeys" + +Remove-Item -Path "$authorizedKeysPath" -Force +$sshKeys = $sshKeys.Split([Environment]::NewLine, [System.StringSplitOptions]::RemoveEmptyEntries) +foreach ($sshKey in $sshKeys) { + $sshKey = $sshKey.Split(':') + $user = $sshKey[0] + $key = $sshKey[1] + + Write-Log "Adding admin user $user with key $key." + + New-LocalUser -AccountNeverExpires -Name $user -NoPassword + Add-LocalGroupMember -Group "Administrators" -Member $user + + Add-Content -Path "$authorizedKeysPath" -Value "$key" + icacls "${authorizedKeysPath}" /inheritance:r /grant "SYSTEM:(F)" /grant "BUILTIN\Administrators:(F)" +} + +Write-Log "Initialized SSH Authorized Keys." From 90851bab598775fd5719d7cd58ce72b66d624de1 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 2 Jul 2020 22:34:17 -0700 Subject: [PATCH 016/155] Initial ytt template --- ci/pipeline.yml | 355 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 355 insertions(+) create mode 100644 ci/pipeline.yml diff --git a/ci/pipeline.yml b/ci/pipeline.yml new file mode 100644 index 0000000000..44e0600ea4 --- /dev/null +++ b/ci/pipeline.yml @@ -0,0 +1,355 @@ +#! helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 +#! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml +#! 
kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main + +#@ def resource(name, type, source): +name: #@ name +type: #@ type +source: #@ source +#@ end + +#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): +#@ return resource(name, "docker-image", { +#@ "repository": repository, +#@ "tag": tag, +#@ "username": username, +#@ "password": password +#@ }) +#@ end + +#@ def gcr_image_resource(name, repository, tag="latest"): +#@ return docker_image_resource(name, repository, tag, "_json_key", "((gcr-json-key))") +#@ end + +#@ def git_resource(name, uri, branch, paths=[], depth=1): +#@ return resource(name, "git", { +#@ "branch": branch, +#@ "depth": depth, +#@ "paths": paths, +#@ "uri": uri +#@ }) +#@ end + +#@ def create_instance(image_family): +task: create +image: task-image +config: + platform: linux + outputs: + - name: identity + - name: instance + params: + BUILD_IMAGE_FAMILY: #@ image_family + run: + path: bash + args: + - -c + - | + set -ueo pipefail + + BUILD_PREFIX=${BUILD_PREFIX:-build} + BUILD_USER=${BUILD_USER:-build} + BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} + BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} + BUILD_ZONE=${BUILD_ZONE:-us-central1-f} + BUILD_SUBNET=${BUILD_SUBNET:-default} + BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} + BUILD_IMAGE_FAMILY=${BUILD_IMAGE_FAMILY:-jbarrett-10-2-build-rhel-7} + BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} + BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} + + if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then + echo "${BUILD_IDENTITY_DIR} not found." + exit 1 + fi + + if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then + echo "${BUILD_INSTANCE_DIR} not found." 
+ exit 1 + fi + + ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} + ssh_pubkey_file=${ssh_key_file}.pub + ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C ${BUILD_USER} -N '' <<< y + ssh_pubkey=$(cat ${ssh_pubkey_file}) + + ssh_keys_file=${BUILD_IDENTITY_DIR}/ssh_keys_file + echo "${BUILD_USER}:${ssh_pubkey}" > ${ssh_keys_file} + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + gcloud compute instances create ${BUILD_INSTANCE} \ + --format json \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --subnet=${BUILD_SUBNET} \ + --machine-type=n1-standard-8 \ + --boot-disk-size=200GB \ + --boot-disk-type=pd-standard \ + --boot-disk-device-name=${BUILD_INSTANCE} \ + --image-project=${BUILD_IMAGE_PROJECT} \ + --image-family=${BUILD_IMAGE_FAMILY} \ + --metadata-from-file ssh-keys=${ssh_keys_file} \ + | tee ${instance_file} + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + if [ ! -r "${instance_file}" ]; then + echo "${instance_file} not readable." + exit 1 + fi + + instance_name=$(jq -r '.[0].name' ${instance_file}) + external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) + + echo "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" +#@ end + +#@ def await_instance(): +task: await +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + params: + run: + path: bash + args: + - -c + - | + set -ueo pipefail + + BUILD_USER=${BUILD_USER:-build} + BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} + BUILD_ZONE=${BUILD_ZONE:-us-central1-f} + BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} + BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} + + + if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then + echo "${BUILD_IDENTITY_DIR} not found." + exit 1 + fi + + if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then + echo "${BUILD_INSTANCE_DIR} not found." + exit 1 + fi + + ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} + if [ ! 
-r "${ssh_key_file}" ]; then + echo "${ssh_key_file} not readable." + exit 1 + fi + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + if [ ! -r "${instance_file}" ]; then + echo "${instance_file} not readable." + exit 1 + fi + + instance_name=$(jq -r '.[0].name' ${instance_file}) + external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) + + echo "Waiting for ssh on ${instance_name} to be ready." + console_file=$(mktemp) + console_next=0 + while ! ssh -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip} echo ready 2>/dev/null ; do + gcloud compute instances get-serial-port-output ${instance_name} \ + --start ${console_next} \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --format json \ + > ${console_file} + + console_next=$(jq -r '.next' ${console_file}) + console_contents=$(jq -r '.contents' ${console_file}) + if [ ! -z "${console_contents}" ]; then + echo -n "${console_contents}" + fi + done + + rm -f ${console_file} +#@ end + +#@ def build(params={}): +task: build +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + - name: source + outputs: + - name: package + params: #@ params + run: + path: bash + args: + - -c + - | + set -ueo pipefail + + BUILD_USER=${BUILD_USER:-build} + BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} + BUILD_ZONE=${BUILD_ZONE:-us-central1-f} + BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} + BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} + + SSH_OPTIONS=${SSH_OPTIONS:-"-o StrictHostKeyChecking=no -o PasswordAuthentication=no"} + + if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then + echo "${BUILD_IDENTITY_DIR} not found." + exit 1 + fi + + if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then + echo "${BUILD_INSTANCE_DIR} not found." + exit 1 + fi + + ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} + if [ ! -r "${ssh_key_file}" ]; then + echo "${ssh_key_file} not readable." 
+ exit 1 + fi + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + if [ ! -r "${instance_file}" ]; then + echo "${instance_file} not readable." + exit 1 + fi + + external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) + + function remote_shell { + ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip} "$@" + } + + pushd source + git_url=$(git remote get-url origin) + git_rev=$(git rev-parse HEAD) + popd + + remote_shell git clone ${git_url} + remote_shell cmake -E chdir geode-native git checkout ${git_rev} + remote_shell cmake -E make_directory build + remote_shell cmake -E chdir build cmake ../geode-native ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} + remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} + remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} + remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" + + #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ +#@ end + +#@ def delete_instance(): +task: delete +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + params: + run: + path: bash + args: + - -c + - | + set -ueo pipefail + + BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} + BUILD_ZONE=${BUILD_ZONE:-us-central1-f} + BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} + + + if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then + echo "${BUILD_INSTANCE_DIR} not found." + exit 1 + fi + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + if [ ! -r "${instance_file}" ]; then + echo "${instance_file} not readable." + exit 1 + fi + + instance_name=$(jq -r '.[0].name' ${instance_file}) + + gcloud compute instances delete ${instance_name} \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --delete-disks=all \ + --quiet + +#@ end + + +#! 
TODO data +#@ geode_native_uri = "http://github.com/pivotal-jbarrett/geode-native.git" +#@ geode_native_branch = "wip/images-gcp" +#@ windows_build_params = { +#@ "CMAKE_CONFIGURE_FLAGS": "-A x64 -Thost=x64", +#@ "CMAKE_CONFIG": "Debug", +#@ "CMAKE_BUILD_FLAGS": "/m", +#@ "CPACK_GENERATORS": "ZIP" +#@ } +#@ linux_build_params = { +#@ "CMAKE_CONFIGURE_FLAGS": "", +#@ "CMAKE_CONFIG": "Debug", +#@ "CMAKE_BUILD_FLAGS": "-j8", +#@ "CPACK_GENERATORS": "TGZ" +#@ } + +resources: + - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") + - #@ git_resource("source", geode_native_uri, geode_native_branch) + #! - #@ git_resource("ci", geode_native_uri, geode_native_branch, ["ci/*"]) + +jobs: + - name: build-windows + plan: + - get: task-image + - get: source + - do: + - #@ create_instance("jbarrett-10-2-build-windows-2019-vs-2017") + - #@ await_instance() + - #@ build(windows_build_params) + ensure: #@ delete_instance() + - name: build-rhel-7 + plan: + - get: task-image + - get: source + - do: + - #@ create_instance("jbarrett-10-2-build-rhel-7") + - #@ await_instance() + - #@ build(linux_build_params) + ensure: #@ delete_instance() + - name: build-rhel-8 + plan: + - get: task-image + - get: source + - do: + - #@ create_instance("jbarrett-10-2-build-rhel-8") + - #@ await_instance() + - #@ build(linux_build_params) + ensure: #@ delete_instance() + - name: build-ubuntu-16-04 + plan: + - get: task-image + - get: source + - do: + - #@ create_instance("jbarrett-10-2-build-ubuntu-16-04") + - #@ await_instance() + - #@ build(linux_build_params) + ensure: #@ delete_instance() + - name: build-ubuntu-18-04 + plan: + - get: task-image + - get: source + - do: + - #@ create_instance("jbarrett-10-2-build-ubuntu-18-04") + - #@ await_instance() + - #@ build(linux_build_params) + ensure: #@ delete_instance() From 191706b47e4dba0b6c9f8bc9a4785fe0162468e5 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 2 Jul 2020 23:00:55 -0700 Subject: [PATCH 017/155] 
Job template --- ci/pipeline.yml | 75 +++++++++++++++++-------------------------------- 1 file changed, 25 insertions(+), 50 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 44e0600ea4..600e604380 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -172,7 +172,9 @@ config: rm -f ${console_file} #@ end -#@ def build(params={}): +#@ load("@ytt:template", "template") + +#@ def build(config, params={}): task: build image: task-image config: @@ -183,7 +185,9 @@ config: - name: source outputs: - name: package - params: #@ params + params: + _: #@ template.replace(params) + _: #@ template.replace({"CMAKE_CONFIG": config}) run: path: bash args: @@ -235,7 +239,7 @@ config: remote_shell git clone ${git_url} remote_shell cmake -E chdir geode-native git checkout ${git_rev} remote_shell cmake -E make_directory build - remote_shell cmake -E chdir build cmake ../geode-native ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} + remote_shell cmake -E chdir build cmake ../geode-native ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" @@ -285,19 +289,28 @@ config: #@ end +#@ def build_job(name, config, image_family, params): +name: #@ "build-" + name + "-" + config +plan: + - get: task-image + - get: source + - do: + - #@ create_instance(image_family) + - #@ await_instance() + - #@ build(config, params) + ensure: #@ delete_instance() +#@ end #! 
TODO data #@ geode_native_uri = "http://github.com/pivotal-jbarrett/geode-native.git" #@ geode_native_branch = "wip/images-gcp" #@ windows_build_params = { #@ "CMAKE_CONFIGURE_FLAGS": "-A x64 -Thost=x64", -#@ "CMAKE_CONFIG": "Debug", #@ "CMAKE_BUILD_FLAGS": "/m", #@ "CPACK_GENERATORS": "ZIP" #@ } #@ linux_build_params = { #@ "CMAKE_CONFIGURE_FLAGS": "", -#@ "CMAKE_CONFIG": "Debug", #@ "CMAKE_BUILD_FLAGS": "-j8", #@ "CPACK_GENERATORS": "TGZ" #@ } @@ -308,48 +321,10 @@ resources: #! - #@ git_resource("ci", geode_native_uri, geode_native_branch, ["ci/*"]) jobs: - - name: build-windows - plan: - - get: task-image - - get: source - - do: - - #@ create_instance("jbarrett-10-2-build-windows-2019-vs-2017") - - #@ await_instance() - - #@ build(windows_build_params) - ensure: #@ delete_instance() - - name: build-rhel-7 - plan: - - get: task-image - - get: source - - do: - - #@ create_instance("jbarrett-10-2-build-rhel-7") - - #@ await_instance() - - #@ build(linux_build_params) - ensure: #@ delete_instance() - - name: build-rhel-8 - plan: - - get: task-image - - get: source - - do: - - #@ create_instance("jbarrett-10-2-build-rhel-8") - - #@ await_instance() - - #@ build(linux_build_params) - ensure: #@ delete_instance() - - name: build-ubuntu-16-04 - plan: - - get: task-image - - get: source - - do: - - #@ create_instance("jbarrett-10-2-build-ubuntu-16-04") - - #@ await_instance() - - #@ build(linux_build_params) - ensure: #@ delete_instance() - - name: build-ubuntu-18-04 - plan: - - get: task-image - - get: source - - do: - - #@ create_instance("jbarrett-10-2-build-ubuntu-18-04") - - #@ await_instance() - - #@ build(linux_build_params) - ensure: #@ delete_instance() + #@ for config in ["Debug", "RelWithDebInfo"]: + - #@ build_job("windows", config, "jbarrett-10-2-build-windows-2019-vs-2017", windows_build_params) + - #@ build_job("rhel-7", config, "jbarrett-10-2-build-rhel-7", linux_build_params) + - #@ build_job("rhel-8", config, "jbarrett-10-2-build-rhel-8", 
linux_build_params) + - #@ build_job("ubuntu-16-04", config, "jbarrett-10-2-build-ubuntu-16-04", linux_build_params) + - #@ build_job("ubuntu-18-04", config, "jbarrett-10-2-build-ubuntu-18-04", linux_build_params) + #@ end From 3cdd4d692f18e19e4f55e3a66bb24a886e6cde36 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 4 Jul 2020 23:41:24 -0700 Subject: [PATCH 018/155] More templates. All platforms / Debug & RelWithDebInfo --- ci/data.yml | 2 + ci/pipeline.yml | 185 +++++++++++++++------------------------------- ci/remote.lib.txt | 59 +++++++++++++++ 3 files changed, 122 insertions(+), 124 deletions(-) create mode 100644 ci/data.yml create mode 100644 ci/remote.lib.txt diff --git a/ci/data.yml b/ci/data.yml new file mode 100644 index 0000000000..e25fdc8014 --- /dev/null +++ b/ci/data.yml @@ -0,0 +1,2 @@ +#@data/values +--- diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 600e604380..fc80b2db63 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -2,10 +2,14 @@ #! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml #! 
kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main -#@ def resource(name, type, source): +#@ load("remote.lib.txt", "remote_shell", "remote_build_variables", "run_unit_tests", "run_integration_tests", "run_legacy_integration_tests") +#@ load("@ytt:data", "data") + +#@ def resource(name, type, source, icon=None): name: #@ name type: #@ type source: #@ source +icon: #@ icon #@ end #@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): @@ -14,7 +18,7 @@ source: #@ source #@ "tag": tag, #@ "username": username, #@ "password": password -#@ }) +#@ }, "docker") #@ end #@ def gcr_image_resource(name, repository, tag="latest"): @@ -27,7 +31,7 @@ source: #@ source #@ "depth": depth, #@ "paths": paths, #@ "uri": uri -#@ }) +#@ }, "github") #@ end #@ def create_instance(image_family): @@ -44,29 +48,16 @@ config: path: bash args: - -c + #@yaml/text-templated-strings - | set -ueo pipefail + (@= remote_build_variables() @) BUILD_PREFIX=${BUILD_PREFIX:-build} - BUILD_USER=${BUILD_USER:-build} BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} - BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} - BUILD_ZONE=${BUILD_ZONE:-us-central1-f} BUILD_SUBNET=${BUILD_SUBNET:-default} BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} BUILD_IMAGE_FAMILY=${BUILD_IMAGE_FAMILY:-jbarrett-10-2-build-rhel-7} - BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} - BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} - - if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then - echo "${BUILD_IDENTITY_DIR} not found." - exit 1 - fi - - if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then - echo "${BUILD_INSTANCE_DIR} not found." 
- exit 1 - fi ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} ssh_pubkey_file=${ssh_key_file}.pub @@ -78,29 +69,22 @@ config: instance_file=${BUILD_INSTANCE_DIR}/instance.json gcloud compute instances create ${BUILD_INSTANCE} \ - --format json \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ - --subnet=${BUILD_SUBNET} \ - --machine-type=n1-standard-8 \ - --boot-disk-size=200GB \ - --boot-disk-type=pd-standard \ - --boot-disk-device-name=${BUILD_INSTANCE} \ - --image-project=${BUILD_IMAGE_PROJECT} \ - --image-family=${BUILD_IMAGE_FAMILY} \ - --metadata-from-file ssh-keys=${ssh_keys_file} \ - | tee ${instance_file} - - instance_file=${BUILD_INSTANCE_DIR}/instance.json - if [ ! -r "${instance_file}" ]; then - echo "${instance_file} not readable." - exit 1 - fi - - instance_name=$(jq -r '.[0].name' ${instance_file}) - external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) - - echo "ssh -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" + --format json \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --subnet=${BUILD_SUBNET} \ + --machine-type=n1-standard-8 \ + --boot-disk-size=200GB \ + --boot-disk-type=pd-standard \ + --boot-disk-device-name=${BUILD_INSTANCE} \ + --image-project=${BUILD_IMAGE_PROJECT} \ + --image-family=${BUILD_IMAGE_FAMILY} \ + --metadata-from-file ssh-keys=${ssh_keys_file} \ + | tee ${instance_file} + + (@=remote_shell() @) + + echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" #@ end #@ def await_instance(): @@ -116,45 +100,18 @@ config: path: bash args: - -c + #@yaml/text-templated-strings - | set -ueo pipefail - - BUILD_USER=${BUILD_USER:-build} - BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} - BUILD_ZONE=${BUILD_ZONE:-us-central1-f} - BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} - BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} - - - if [ ! 
-d "${BUILD_IDENTITY_DIR}" ]; then - echo "${BUILD_IDENTITY_DIR} not found." - exit 1 - fi - - if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then - echo "${BUILD_INSTANCE_DIR} not found." - exit 1 - fi - - ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} - if [ ! -r "${ssh_key_file}" ]; then - echo "${ssh_key_file} not readable." - exit 1 - fi - - instance_file=${BUILD_INSTANCE_DIR}/instance.json - if [ ! -r "${instance_file}" ]; then - echo "${instance_file} not readable." - exit 1 - fi + (@=remote_shell() @) + SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" instance_name=$(jq -r '.[0].name' ${instance_file}) - external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) echo "Waiting for ssh on ${instance_name} to be ready." console_file=$(mktemp) console_next=0 - while ! ssh -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip} echo ready 2>/dev/null ; do + while ! remote_shell echo ready 2>/dev/null ; do gcloud compute instances get-serial-port-output ${instance_name} \ --start ${console_next} \ --project=${BUILD_PROJECT} \ @@ -192,44 +149,11 @@ config: path: bash args: - -c + #@yaml/text-templated-strings - | set -ueo pipefail - BUILD_USER=${BUILD_USER:-build} - BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} - BUILD_ZONE=${BUILD_ZONE:-us-central1-f} - BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} - BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} - - SSH_OPTIONS=${SSH_OPTIONS:-"-o StrictHostKeyChecking=no -o PasswordAuthentication=no"} - - if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then - echo "${BUILD_IDENTITY_DIR} not found." - exit 1 - fi - - if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then - echo "${BUILD_INSTANCE_DIR} not found." - exit 1 - fi - - ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} - if [ ! -r "${ssh_key_file}" ]; then - echo "${ssh_key_file} not readable." - exit 1 - fi - - instance_file=${BUILD_INSTANCE_DIR}/instance.json - if [ ! 
-r "${instance_file}" ]; then - echo "${instance_file} not readable." - exit 1 - fi - - external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) - - function remote_shell { - ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip} "$@" - } + (@= remote_shell() @) pushd source git_url=$(git remote get-url origin) @@ -247,6 +171,31 @@ config: #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ #@ end +#@ def remote_task(name, config, commands, timeout, params={}): +task: #@ name +timeout: #@ timeout +image: task-image +attempts: 5 +config: + platform: linux + inputs: + - name: identity + - name: instance + outputs: + params: + _: #@ template.replace(params) + _: #@ template.replace({"CMAKE_CONFIG": config}) + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@= remote_shell() @) + (@= commands @) +#@ end + #@ def delete_instance(): task: delete image: task-image @@ -260,24 +209,10 @@ config: path: bash args: - -c + #@yaml/text-templated-strings - | set -ueo pipefail - - BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} - BUILD_ZONE=${BUILD_ZONE:-us-central1-f} - BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} - - - if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then - echo "${BUILD_INSTANCE_DIR} not found." - exit 1 - fi - - instance_file=${BUILD_INSTANCE_DIR}/instance.json - if [ ! -r "${instance_file}" ]; then - echo "${instance_file} not readable." 
- exit 1 - fi + (@= remote_build_variables() @) instance_name=$(jq -r '.[0].name' ${instance_file}) @@ -286,7 +221,6 @@ config: --zone=${BUILD_ZONE} \ --delete-disks=all \ --quiet - #@ end #@ def build_job(name, config, image_family, params): @@ -298,6 +232,9 @@ plan: - #@ create_instance(image_family) - #@ await_instance() - #@ build(config, params) + - #@ remote_task("unit-tests", config, run_unit_tests(), "5m", params) + - #@ remote_task("integration-tests", config, run_integration_tests(), "30m", params) + - #@ remote_task("legacy-integration-tests", config, run_legacy_integration_tests(), "2h", params) ensure: #@ delete_instance() #@ end diff --git a/ci/remote.lib.txt b/ci/remote.lib.txt new file mode 100644 index 0000000000..ba92ce75bf --- /dev/null +++ b/ci/remote.lib.txt @@ -0,0 +1,59 @@ +(@ def remote_build_variables(): -@) +BUILD_USER=${BUILD_USER:-build} +BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} +BUILD_ZONE=${BUILD_ZONE:-us-central1-f} +BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} +BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} + +if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then + echo "${BUILD_IDENTITY_DIR} not found." + exit 1 +fi + +if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then + echo "${BUILD_INSTANCE_DIR} not found." + exit 1 +fi +(@- end @) + +(@ def remote_shell(): -@) +(@=remote_build_variables() @) + +SSH_OPTIONS=${SSH_OPTIONS:-"-o StrictHostKeyChecking=no -o PasswordAuthentication=no"} + +ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} +if [ ! -r "${ssh_key_file}" ]; then + echo "${ssh_key_file} not readable." + exit 1 +fi + +instance_file=${BUILD_INSTANCE_DIR}/instance.json +if [ ! -r "${instance_file}" ]; then + echo "${instance_file} not readable." 
+ exit 1 +fi + +external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) + +function remote_shell { + ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip} "$@" +} +(@- end @) + +(@ def run_unit_tests(): -@) +remote_shell taskkill /t /f /im ctest.exe /im apache-geode_unittests.exe || true +remote_shell pkill ^ctest$ ^apache-geode_unittests$ || true +remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed +(@- end @) + +(@ def run_integration_tests(): -@) +remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true +remote_shell pkill ^ctest$ ^cpp-integration-test$ ^java$ || true +remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test +(@- end @) + +(@ def run_legacy_integration_tests(): -@) +remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true +remote_shell pkill ^ctest$ ^test ^java$ || true +remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed +(@- end @) From c5d5718bff5aa2f7134f9b52bf6f0cf9d9a6cb43 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 5 Jul 2020 19:55:26 -0700 Subject: [PATCH 019/155] Fixes tests --- ci/pipeline.yml | 4 ++-- ci/remote.lib.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index fc80b2db63..0613c7e173 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -73,7 +73,7 @@ config: --project=${BUILD_PROJECT} \ --zone=${BUILD_ZONE} \ --subnet=${BUILD_SUBNET} \ - --machine-type=n1-standard-8 \ + --machine-type=e2-standard-16 \ --boot-disk-size=200GB \ --boot-disk-type=pd-standard \ --boot-disk-device-name=${BUILD_INSTANCE} \ @@ -212,7 +212,7 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@= 
remote_build_variables() @) + (@= remote_shell() @) instance_name=$(jq -r '.[0].name' ${instance_file}) diff --git a/ci/remote.lib.txt b/ci/remote.lib.txt index ba92ce75bf..4e11246c44 100644 --- a/ci/remote.lib.txt +++ b/ci/remote.lib.txt @@ -42,18 +42,18 @@ function remote_shell { (@ def run_unit_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im apache-geode_unittests.exe || true -remote_shell pkill ^ctest$ ^apache-geode_unittests$ || true +remote_shell 'pkill ^ctest$; pkill ^apache-geode_unittests$' || true remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed (@- end @) (@ def run_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true -remote_shell pkill ^ctest$ ^cpp-integration-test$ ^java$ || true +remote_shell 'pkill ^ctest$; pkill ^cpp-integration-test$; pkill ^java$' || true remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test (@- end @) (@ def run_legacy_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true -remote_shell pkill ^ctest$ ^test ^java$ || true +remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed (@- end @) From 6b221b3facb1d7f8ff816fc9b0ef02e35539ab3c Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 5 Jul 2020 22:06:56 -0700 Subject: [PATCH 020/155] Fixes GEODE_HOME on linux --- packer/linux/install-geode.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packer/linux/install-geode.sh b/packer/linux/install-geode.sh index 6447450644..50ef1ae51e 100644 --- a/packer/linux/install-geode.sh +++ b/packer/linux/install-geode.sh @@ -23,4 +23,4 @@ cd /usr/local curl -L 
"https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | \ tar xzf - -echo export GEODE_HOME=/usr/local/apache-geode-${GEODE_VERSION} > /etc/profile.d/geode.sh +echo GEODE_HOME=/usr/local/apache-geode-${GEODE_VERSION} >> /etc/environment From e808a12cd16bd71451ab7ff7239420260c959270 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 5 Jul 2020 22:07:06 -0700 Subject: [PATCH 021/155] Adds GCI resource. --- ci/pipeline.yml | 47 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 0613c7e173..02e3be7a9e 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -34,15 +34,31 @@ icon: #@ icon #@ }, "github") #@ end +#@ def gci_resource_name(family): +#@ return family + "-gci" +#@ end + +#@ def gci_resource(family): +#@ return resource(gci_resource_name(family), "gci", { +#@ "key": "((gcr-json-key))", +#@ "family_project": "gemfire-dev", +#@ "family": family, +#@ }, "google-cloud") +#@ end + #@ def create_instance(image_family): task: create image: task-image config: platform: linux + inputs: + - name: #@ gci_resource_name(image_family) + path: image outputs: - name: identity - name: instance params: + BUILD_IMAGE_FILE: image/name BUILD_IMAGE_FAMILY: #@ image_family run: path: bash @@ -57,7 +73,22 @@ config: BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} BUILD_SUBNET=${BUILD_SUBNET:-default} BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} - BUILD_IMAGE_FAMILY=${BUILD_IMAGE_FAMILY:-jbarrett-10-2-build-rhel-7} + BUILD_IMAGE=${BUILD_IMAGE:-''} + BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-''} + BUILD_IMAGE_FAMILY=${BUILD_IMAGE_FAMILY:-''} + + if [ -z "${BUILD_IMAGE}" ]; then + if [ -n "${BUILD_IMAGE_FILE}" ]; then + BUILD_IMAGE=$(cat ${BUILD_IMAGE_FILE}) + elif [ -n "${BUILD_IMAGE_FAMILY}" ]; then + BUILD_IMAGE=$(gcloud compute images describe-from-family 
${BUILD_IMAGE_FAMILY} --project=${BUILD_IMAGE_PROJECT} --format 'value(name)') + fi + fi + + if [ -z "${BUILD_IMAGE}" ]; then + echo "No build image." + exit 1; + fi ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} ssh_pubkey_file=${ssh_key_file}.pub @@ -228,6 +259,7 @@ name: #@ "build-" + name + "-" + config plan: - get: task-image - get: source + - get: #@ gci_resource_name(image_family) - do: - #@ create_instance(image_family) - #@ await_instance() @@ -252,10 +284,21 @@ plan: #@ "CPACK_GENERATORS": "TGZ" #@ } +resource_types: + - name: gci + type: docker-image + source: + repository: smgoller/gci-resource + resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ git_resource("source", geode_native_uri, geode_native_branch) - #! - #@ git_resource("ci", geode_native_uri, geode_native_branch, ["ci/*"]) + #! - #@ git_resource("ci", geode_native_uri, geode_native_branch, ["ci/*"]) + - #@ gci_resource("jbarrett-10-2-build-windows-2019-vs-2017") + - #@ gci_resource("jbarrett-10-2-build-rhel-7") + - #@ gci_resource("jbarrett-10-2-build-rhel-8") + - #@ gci_resource("jbarrett-10-2-build-ubuntu-16-04") + - #@ gci_resource("jbarrett-10-2-build-ubuntu-18-04") jobs: #@ for config in ["Debug", "RelWithDebInfo"]: From df6132c03cbc15584844bc3f8bb02e03be5b19dc Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 5 Jul 2020 22:39:48 -0700 Subject: [PATCH 022/155] Adds data file. 
--- ci/data.yml | 37 +++++++++++++++++++++++++ ci/pipeline.yml | 74 +++++++++++++++++++++++-------------------------- 2 files changed, 71 insertions(+), 40 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index e25fdc8014..d39761f9c1 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,2 +1,39 @@ #@data/values --- +builds: + - name: windows + image_family: build-windows-2019-vs-2017 + params: + CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" + CMAKE_BUILD_FLAGS: "/m" + CPACK_GENERATORS: "ZIP" + - name: rhel-7 + image_family: build-rhel-7 + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j8" + CPACK_GENERATORS: "TGZ" + - name: rhel-8 + image_family: build-rhel-8 + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j8" + CPACK_GENERATORS: "TGZ" + - name: ubuntu-16.04 + image_family: build-ubuntu-16-04 + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j8" + CPACK_GENERATORS: "TGZ" + - name: ubuntu-18.04 + image_family: build-ubuntu-18-04 + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j8" + CPACK_GENERATORS: "TGZ" + +configs: + - name: debug + config: Debug + - name: release + config: RelWithDebInfo diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 02e3be7a9e..356dee452f 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -34,36 +34,39 @@ icon: #@ icon #@ }, "github") #@ end +#@ def image_family_name(family): +#@ return "jbarrett-10-2-" + family +#@ end + #@ def gci_resource_name(family): -#@ return family + "-gci" +#@ return image_family_name(family) + "-gci" #@ end #@ def gci_resource(family): #@ return resource(gci_resource_name(family), "gci", { #@ "key": "((gcr-json-key))", #@ "family_project": "gemfire-dev", -#@ "family": family, +#@ "family": image_family_name(family), #@ }, "google-cloud") #@ end -#@ def create_instance(image_family): +#@ def create_instance(image_resource): task: create image: task-image config: platform: linux inputs: - - name: #@ gci_resource_name(image_family) + - name: #@ image_resource path: image outputs: - 
name: identity - name: instance params: BUILD_IMAGE_FILE: image/name - BUILD_IMAGE_FAMILY: #@ image_family run: path: bash args: - - -c + - -xc #@yaml/text-templated-strings - | set -ueo pipefail @@ -109,7 +112,7 @@ config: --boot-disk-type=pd-standard \ --boot-disk-device-name=${BUILD_INSTANCE} \ --image-project=${BUILD_IMAGE_PROJECT} \ - --image-family=${BUILD_IMAGE_FAMILY} \ + --image=${BUILD_IMAGE} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ | tee ${instance_file} @@ -162,7 +165,7 @@ config: #@ load("@ytt:template", "template") -#@ def build(config, params={}): +#@ def build_task(config, params={}): task: build image: task-image config: @@ -254,35 +257,31 @@ config: --quiet #@ end -#@ def build_job(name, config, image_family, params): -name: #@ "build-" + name + "-" + config +#@ def build_job(build, config): +name: #@ "build-" + build.name + "-" + config.name plan: - - get: task-image - - get: source - - get: #@ gci_resource_name(image_family) + - in_parallel: + fail_fast: true + steps: + - get: task-image + trigger: true + - get: source + trigger: true + - get: #@ gci_resource_name(build.image_family) + trigger: true - do: - - #@ create_instance(image_family) + - #@ create_instance(gci_resource_name(build.image_family)) - #@ await_instance() - - #@ build(config, params) - - #@ remote_task("unit-tests", config, run_unit_tests(), "5m", params) - - #@ remote_task("integration-tests", config, run_integration_tests(), "30m", params) - - #@ remote_task("legacy-integration-tests", config, run_legacy_integration_tests(), "2h", params) + - #@ build_task(config.config, build.params) + - #@ remote_task("unit-tests", config.config, run_unit_tests(), "5m", build.params) + - #@ remote_task("integration-tests", config.config, run_integration_tests(), "30m", build.params) + - #@ remote_task("legacy-integration-tests", config.config, run_legacy_integration_tests(), "1h", build.params) ensure: #@ delete_instance() #@ end #! 
TODO data #@ geode_native_uri = "http://github.com/pivotal-jbarrett/geode-native.git" #@ geode_native_branch = "wip/images-gcp" -#@ windows_build_params = { -#@ "CMAKE_CONFIGURE_FLAGS": "-A x64 -Thost=x64", -#@ "CMAKE_BUILD_FLAGS": "/m", -#@ "CPACK_GENERATORS": "ZIP" -#@ } -#@ linux_build_params = { -#@ "CMAKE_CONFIGURE_FLAGS": "", -#@ "CMAKE_BUILD_FLAGS": "-j8", -#@ "CPACK_GENERATORS": "TGZ" -#@ } resource_types: - name: gci @@ -293,18 +292,13 @@ resource_types: resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ git_resource("source", geode_native_uri, geode_native_branch) - #! - #@ git_resource("ci", geode_native_uri, geode_native_branch, ["ci/*"]) - - #@ gci_resource("jbarrett-10-2-build-windows-2019-vs-2017") - - #@ gci_resource("jbarrett-10-2-build-rhel-7") - - #@ gci_resource("jbarrett-10-2-build-rhel-8") - - #@ gci_resource("jbarrett-10-2-build-ubuntu-16-04") - - #@ gci_resource("jbarrett-10-2-build-ubuntu-18-04") + #@ for build in data.values.builds: + - #@ gci_resource(build.image_family) + #@ end jobs: - #@ for config in ["Debug", "RelWithDebInfo"]: - - #@ build_job("windows", config, "jbarrett-10-2-build-windows-2019-vs-2017", windows_build_params) - - #@ build_job("rhel-7", config, "jbarrett-10-2-build-rhel-7", linux_build_params) - - #@ build_job("rhel-8", config, "jbarrett-10-2-build-rhel-8", linux_build_params) - - #@ build_job("ubuntu-16-04", config, "jbarrett-10-2-build-ubuntu-16-04", linux_build_params) - - #@ build_job("ubuntu-18-04", config, "jbarrett-10-2-build-ubuntu-18-04", linux_build_params) + #@ for build in data.values.builds: + #@ for config in data.values.configs: + - #@ build_job(build, config) + #@ end #@ end From 90edb7b2e9812dcc0fe7073ddf01329e10ad0bc7 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 5 Jul 2020 23:15:40 -0700 Subject: [PATCH 023/155] Adds whole test timeout --- ci/remote.lib.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/ci/remote.lib.txt b/ci/remote.lib.txt index 4e11246c44..69540ebe2e 100644 --- a/ci/remote.lib.txt +++ b/ci/remote.lib.txt @@ -43,17 +43,17 @@ function remote_shell { (@ def run_unit_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im apache-geode_unittests.exe || true remote_shell 'pkill ^ctest$; pkill ^apache-geode_unittests$' || true -remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed (@- end @) (@ def run_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^cpp-integration-test$; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test +remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --stop-time=1200 --output-on-failure --rerun-failed -E ^BasicIPv6Test (@- end @) (@ def run_legacy_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j4 --timeout=500 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --stop-time=2700 --output-on-failure --rerun-failed (@- end @) From 6cba0a05e0a6cda067800e5eafaae30103647bfd Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 6 Jul 2020 09:14:41 -0700 Subject: [PATCH 024/155] Pipeline improvements. 
--- ci/README.md | 3 +++ ci/data.yml | 8 ++++---- ci/pipeline.yml | 22 ++++------------------ 3 files changed, 11 insertions(+), 22 deletions(-) create mode 100644 ci/README.md diff --git a/ci/README.md b/ci/README.md new file mode 100644 index 0000000000..0d6ae5e803 --- /dev/null +++ b/ci/README.md @@ -0,0 +1,3 @@ +```console +ytt -f pipeline.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml +``` diff --git a/ci/data.yml b/ci/data.yml index d39761f9c1..ef81e911ea 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -11,25 +11,25 @@ builds: image_family: build-rhel-7 params: CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j8" + CMAKE_BUILD_FLAGS: "-j16" CPACK_GENERATORS: "TGZ" - name: rhel-8 image_family: build-rhel-8 params: CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j8" + CMAKE_BUILD_FLAGS: "-j16" CPACK_GENERATORS: "TGZ" - name: ubuntu-16.04 image_family: build-ubuntu-16-04 params: CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j8" + CMAKE_BUILD_FLAGS: "-j16" CPACK_GENERATORS: "TGZ" - name: ubuntu-18.04 image_family: build-ubuntu-18-04 params: CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j8" + CMAKE_BUILD_FLAGS: "-j16" CPACK_GENERATORS: "TGZ" configs: diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 356dee452f..773852fc97 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -62,11 +62,10 @@ config: - name: identity - name: instance params: - BUILD_IMAGE_FILE: image/name run: path: bash args: - - -xc + - -c #@yaml/text-templated-strings - | set -ueo pipefail @@ -76,22 +75,9 @@ config: BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} BUILD_SUBNET=${BUILD_SUBNET:-default} BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} - BUILD_IMAGE=${BUILD_IMAGE:-''} - BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-''} - BUILD_IMAGE_FAMILY=${BUILD_IMAGE_FAMILY:-''} - - if [ -z "${BUILD_IMAGE}" ]; then - if [ -n "${BUILD_IMAGE_FILE}" ]; then - BUILD_IMAGE=$(cat ${BUILD_IMAGE_FILE}) - elif [ -n 
"${BUILD_IMAGE_FAMILY}" ]; then - BUILD_IMAGE=$(gcloud compute images describe-from-family ${BUILD_IMAGE_FAMILY} --project=${BUILD_IMAGE_PROJECT} --format 'value(name)') - fi - fi + BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-'image/name'} - if [ -z "${BUILD_IMAGE}" ]; then - echo "No build image." - exit 1; - fi + build_image=$(cat ${BUILD_IMAGE_FILE}) ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} ssh_pubkey_file=${ssh_key_file}.pub @@ -112,7 +98,7 @@ config: --boot-disk-type=pd-standard \ --boot-disk-device-name=${BUILD_INSTANCE} \ --image-project=${BUILD_IMAGE_PROJECT} \ - --image=${BUILD_IMAGE} \ + --image=${build_image} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ | tee ${instance_file} From 451d6adc34720f244e1ad65aea8ba50e46a02ebe Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 06:38:05 +0000 Subject: [PATCH 025/155] Fixes legacy integration port allocations. --- clicache/integration-test/CacheHelperN.cs | 23 +++++------ tests/cli/DUnitFramework/UnitProcess.cs | 4 +- tests/cli/DUnitFramework/Util.cs | 48 ++++++++--------------- tests/cli/NewFwkLib/Utils.cs | 2 +- 4 files changed, 27 insertions(+), 50 deletions(-) diff --git a/clicache/integration-test/CacheHelperN.cs b/clicache/integration-test/CacheHelperN.cs index cdcd52ed1c..44e6e5176c 100644 --- a/clicache/integration-test/CacheHelperN.cs +++ b/clicache/integration-test/CacheHelperN.cs @@ -321,7 +321,6 @@ public static string TestDir private const string JavaServerStopArgs = "stop server"; private const string LocatorStartArgs = "start locator"; private const string LocatorStopArgs = "stop locator"; - private const int LocatorPort = 34755; private const int MaxWaitMillis = 60000; private static char PathSep = Path.DirectorySeparatorChar; @@ -1764,23 +1763,23 @@ public static void createRandomPorts() { if (HOST_PORT_1 == 0) { - HOST_PORT_1 = Util.RandPort(10000, 64000); - HOST_PORT_2 = Util.RandPort(10000, 64000); - HOST_PORT_3 = Util.RandPort(10000, 64000); - HOST_PORT_4 = 
Util.RandPort(10000, 64000); + HOST_PORT_1 = Util.GetAvailablePort(); + HOST_PORT_2 = Util.GetAvailablePort(); + HOST_PORT_3 = Util.GetAvailablePort(); + HOST_PORT_4 = Util.GetAvailablePort(); } if (LOCATOR_PORT_1 == 0) { - LOCATOR_PORT_1 = Util.RandPort(10000, 64000); - LOCATOR_PORT_2 = Util.RandPort(10000, 64000); - LOCATOR_PORT_3 = Util.RandPort(10000, 64000); - LOCATOR_PORT_4 = Util.RandPort(10000, 64000); + LOCATOR_PORT_1 = Util.GetAvailablePort(); + LOCATOR_PORT_2 = Util.GetAvailablePort(); + LOCATOR_PORT_3 = Util.GetAvailablePort(); + LOCATOR_PORT_4 = Util.GetAvailablePort(); } if (JMX_MANAGER_PORT == 0) { - JMX_MANAGER_PORT = Util.RandPort(10000, 64000); + JMX_MANAGER_PORT = Util.GetAvailablePort(); } } @@ -1806,10 +1805,6 @@ public static void StartJavaLocator(int locatorNum, string startDir) { StartJavaLocator(locatorNum, startDir, null); } - public static int getBaseLocatorPort() - { - return LocatorPort; - } public static void StartJavaLocator(int locatorNum, string startDir, string extraLocatorArgs) diff --git a/tests/cli/DUnitFramework/UnitProcess.cs b/tests/cli/DUnitFramework/UnitProcess.cs index 50705a076b..dbd0531703 100644 --- a/tests/cli/DUnitFramework/UnitProcess.cs +++ b/tests/cli/DUnitFramework/UnitProcess.cs @@ -68,7 +68,6 @@ public override string StartDir #region Private members private static int m_clientId = 0; - private static int m_clientPort = Util.RandPort(20000, 40000) - 1; internal static Dictionary ProcessIDMap = new Dictionary(); @@ -206,8 +205,7 @@ public static int GetClientId() public static int GetClientPort() { - Interlocked.Increment(ref m_clientPort); - return m_clientPort; + return Util.GetAvailablePort(); } private UnitProcess(string clientId) diff --git a/tests/cli/DUnitFramework/Util.cs b/tests/cli/DUnitFramework/Util.cs index 0a067b3872..95e588c260 100644 --- a/tests/cli/DUnitFramework/Util.cs +++ b/tests/cli/DUnitFramework/Util.cs @@ -454,7 +454,7 @@ public static string SystemType private static Random m_rnd = new 
Random((int)DateTime.Now.Ticks); private static IDriverComm m_driverComm = null; - private static int m_driverPort = RandPort(20000, 40000); + private static int m_driverPort = 0; private static IPAddress m_ipAddress = null; private static IBBComm m_bbComm = null; private static string m_externalBBServer = null; @@ -474,7 +474,12 @@ public static string SystemType private const string NoServerConnMsg = "Server connection not established."; private static Dictionary m_testCompleteMap = new Dictionary(); + + static Util() + { + DriverPort = GetAvailablePort(); + } #endregion /// @@ -753,41 +758,20 @@ public static byte[] RandBytes(int size) } /// - /// Get a free random port in the given range. + /// Get a free ephemeral port. /// - /// - /// The inclusive lower bound of the random integer. - /// - /// - /// The exclusive upper bound of the random integer. - /// /// the free port number - public static int RandPort(int min, int max) + public static int GetAvailablePort() { - int portNo; - while (true) + Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp); + try { - portNo = Rand(min, max); - Socket s = new Socket(AddressFamily.InterNetwork, - SocketType.Stream, ProtocolType.Tcp); - try - { - s.Bind(new IPEndPoint(IPAddress.Any, portNo)); - s.Close(); - return portNo; - } - catch (SocketException ex) - { - // EADDRINUSE? 
- if (ex.ErrorCode == 10048) - { - continue; - } - else - { - throw; - } - } + s.Bind(new IPEndPoint(IPAddress.Any, 0)); + return ((IPEndPoint)s.LocalEndPoint).Port; + } + finally + { + s.Close(); } } diff --git a/tests/cli/NewFwkLib/Utils.cs b/tests/cli/NewFwkLib/Utils.cs index c645e3b285..54213e0af4 100644 --- a/tests/cli/NewFwkLib/Utils.cs +++ b/tests/cli/NewFwkLib/Utils.cs @@ -459,7 +459,7 @@ private void SetupJavaServers(string argStr) cacheXml = Util.NormalizePath(xmlDir + PathSep + cacheXml); } - int javaServerPort = Util.RandPort(21321, 29789); + int javaServerPort = Util.GetAvailablePort(); List targetHosts = Util.BBGet(FwkReadData.HostGroupKey, hostGroup) as List; From 376520d242885379962ed2c6d6d89d573258bc43 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 08:20:29 -0700 Subject: [PATCH 026/155] Reverts timeout of ClientProcess --- tests/cli/FwkClient/ClientProcess.cs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/cli/FwkClient/ClientProcess.cs b/tests/cli/FwkClient/ClientProcess.cs index 8e3dfc29b6..e573c54b55 100644 --- a/tests/cli/FwkClient/ClientProcess.cs +++ b/tests/cli/FwkClient/ClientProcess.cs @@ -32,7 +32,7 @@ class ClientProcess public static IChannel clientChannel = null; public static string bbUrl; public static string logFile = null; - static int Main(string[] args) + static void Main(string[] args) { string myId = "0"; try @@ -78,9 +78,7 @@ static int Main(string[] args) { Util.Log("FATAL: Client {0}, Exception caught: {1}", myId, ex); } - System.Threading.Thread.Sleep(TimeSpan.FromMinutes(10)); - Util.Log("FATAL: Client {0}, Terminating after timeout.", myId); - return -1; + System.Threading.Thread.Sleep(System.Threading.Timeout.Infinite); } private static void ShowUsage(string[] args) From ce6ae49e30d2fa0b1d432a166d3ecc8c8db7c85c Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 09:56:01 -0700 Subject: [PATCH 027/155] Use inheritance for build data. 
--- ci/data.yml | 27 ++++++++---------- ci/pipeline.yml | 72 +++++++++++++++++++++++++++++++++++++---------- ci/remote.lib.txt | 27 ++++++++++++++---- 3 files changed, 89 insertions(+), 37 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index ef81e911ea..ce97cb5be9 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,36 +1,31 @@ #@data/values --- +default: + build: + name: #@ None + image_family: #@ None + with_dot_net: #@ False + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" + builds: - name: windows image_family: build-windows-2019-vs-2017 + with_dot_net: #@ True params: CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" CMAKE_BUILD_FLAGS: "/m" CPACK_GENERATORS: "ZIP" - name: rhel-7 image_family: build-rhel-7 - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - name: rhel-8 image_family: build-rhel-8 - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - name: ubuntu-16.04 image_family: build-ubuntu-16-04 - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - name: ubuntu-18.04 image_family: build-ubuntu-18-04 - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" configs: - name: debug diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 773852fc97..9a288252dd 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -2,8 +2,18 @@ #! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml #! 
kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main -#@ load("remote.lib.txt", "remote_shell", "remote_build_variables", "run_unit_tests", "run_integration_tests", "run_legacy_integration_tests") +#@ load("remote.lib.txt", +#@ "remote_shell", +#@ "remote_build_variables", +#@ "run_cpp_unit_tests", +#@ "run_cpp_integration_tests", +#@ "run_cpp_legacy_integration_tests", +#@ "run_net_unit_tests", +#@ "run_net_integration_tests", +#@ "run_net_legacy_integration_tests") #@ load("@ytt:data", "data") +#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:struct", "struct") #@ def resource(name, type, source, icon=None): name: #@ name @@ -180,10 +190,10 @@ config: git_rev=$(git rev-parse HEAD) popd - remote_shell git clone ${git_url} - remote_shell cmake -E chdir geode-native git checkout ${git_rev} + remote_shell git clone ${git_url} source + remote_shell cmake -E chdir source git checkout ${git_rev} remote_shell cmake -E make_directory build - remote_shell cmake -E chdir build cmake ../geode-native ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} + remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" @@ -191,11 +201,11 @@ config: #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ #@ end -#@ def remote_task(name, config, commands, timeout, params={}): +#@ def remote_task(name, config, commands, timeout, params={}, attempts=1): task: #@ name timeout: #@ timeout image: task-image -attempts: 5 +attempts: #@ attempts config: platform: linux inputs: @@ -216,6 +226,30 @@ config: (@= commands @) #@ 
end +#@ def cpp_unit_test_task(build, config): +#@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params, 5) +#@ end + +#@ def cpp_integration_test_task(build, config): +#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) +#@ end + +#@ def cpp_legacy_integration_test_task(build, config): +#@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params, 5) +#@ end + +#@ def net_unit_test_task(build, config): +#@ return remote_task(".net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) +#@ end + +#@ def net_integration_test_task(build, config): +#@ return remote_task(".net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) +#@ end + +#@ def net_legacy_integration_test_task(build, config): +#@ return remote_task(".net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) +#@ end + #@ def delete_instance(): task: delete image: task-image @@ -259,12 +293,22 @@ plan: - #@ create_instance(gci_resource_name(build.image_family)) - #@ await_instance() - #@ build_task(config.config, build.params) - - #@ remote_task("unit-tests", config.config, run_unit_tests(), "5m", build.params) - - #@ remote_task("integration-tests", config.config, run_integration_tests(), "30m", build.params) - - #@ remote_task("legacy-integration-tests", config.config, run_legacy_integration_tests(), "1h", build.params) + - #@ cpp_unit_test_task(build, config) + - #@ cpp_integration_test_task(build, config) + - #@ cpp_legacy_integration_test_task(build, config) + #@ if build.with_dot_net: + - #@ net_unit_test_task(build, config) + - #@ net_integration_test_task(build, config) + - #@ net_legacy_integration_test_task(build, config) + #@ end ensure: #@ delete_instance() #@ end +#@ def extends_build(build): +#@ return 
struct.make(**overlay.apply(data.values.default.build, build)) +#@ end + + #! TODO data #@ geode_native_uri = "http://github.com/pivotal-jbarrett/geode-native.git" #@ geode_native_branch = "wip/images-gcp" @@ -278,13 +322,11 @@ resource_types: resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ git_resource("source", geode_native_uri, geode_native_branch) - #@ for build in data.values.builds: - - #@ gci_resource(build.image_family) - #@ end + #@ for/end build in data.values.builds: + - #@ gci_resource(extends_build(build).image_family) jobs: #@ for build in data.values.builds: - #@ for config in data.values.configs: - - #@ build_job(build, config) - #@ end + #@ for/end config in data.values.configs: + - #@ build_job(extends_build(build), config) #@ end diff --git a/ci/remote.lib.txt b/ci/remote.lib.txt index 69540ebe2e..b0dbb07054 100644 --- a/ci/remote.lib.txt +++ b/ci/remote.lib.txt @@ -40,20 +40,35 @@ function remote_shell { } (@- end @) -(@ def run_unit_tests(): -@) +(@ def run_cpp_unit_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im apache-geode_unittests.exe || true remote_shell 'pkill ^ctest$; pkill ^apache-geode_unittests$' || true -remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed (@- end @) -(@ def run_integration_tests(): -@) +(@ def run_cpp_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^cpp-integration-test$; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --stop-time=1200 --output-on-failure --rerun-failed -E ^BasicIPv6Test +remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} 
-j8 --timeout=500 --stop-time=1200 --output-on-failure --rerun-failed -E ^BasicIPv6Test (@- end @) -(@ def run_legacy_integration_tests(): -@) +(@ def run_cpp_legacy_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --stop-time=2700 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --stop-time=2700 --output-on-failure --rerun-failed +(@- end @) + +(@ def run_net_unit_tests(): -@) +remote_shell taskkill /t /f /im xunit.console.exe || true +remote_shell cmake -E chdir build/clicache/test2 ../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.Tests2.dll -parallel all +(@- end @) + +(@ def run_net_integration_tests(): -@) +remote_shell taskkill /t /f /im xunit.console.exe /im java.exe || true +remote_shell cmake -E chdir build/clicache/integration-test2 ../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.IntegrationTests2.dll -verbose -maxthreads 6 +(@- end @) + +(@ def run_net_legacy_integration_tests(): -@) +remote_shell taskkill /t /f /im ctest.exe /im nunit-console.exe /im nunit-agent.exe /im fwkclient.exe /im java.exe || true +remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j8 --timeout=1000 --stop-time=2700 --output-on-failure --rerun-failed (@- end @) From 5f95d1d96790163036ed7dacb344ec6e328df336 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 15:46:13 -0700 Subject: [PATCH 028/155] Fixes out of memory based hangs. 
--- ci/remote.lib.txt | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ci/remote.lib.txt b/ci/remote.lib.txt index b0dbb07054..e83fb92c21 100644 --- a/ci/remote.lib.txt +++ b/ci/remote.lib.txt @@ -49,13 +49,14 @@ remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j8 --t (@ def run_cpp_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^cpp-integration-test$; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --stop-time=1200 --output-on-failure --rerun-failed -E ^BasicIPv6Test +remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test (@- end @) (@ def run_cpp_legacy_integration_tests(): -@) +set -x remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true -remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --stop-time=2700 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed (@- end @) (@ def run_net_unit_tests(): -@) @@ -70,5 +71,5 @@ remote_shell cmake -E chdir build/clicache/integration-test2 ../packages/xunit.r (@ def run_net_legacy_integration_tests(): -@) remote_shell taskkill /t /f /im ctest.exe /im nunit-console.exe /im nunit-agent.exe /im fwkclient.exe /im java.exe || true -remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j8 --timeout=1000 --stop-time=2700 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=1000 --output-on-failure --rerun-failed (@- end @) From 
66c7a615386912bc1b1a35c28a7449c3b36794ed Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 16:14:19 -0700 Subject: [PATCH 029/155] Split up template --- ci/data.yml | 4 + ci/pipeline.yml | 310 +------------------------------------------ ci/templates.lib.yml | 303 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 313 insertions(+), 304 deletions(-) create mode 100644 ci/templates.lib.yml diff --git a/ci/data.yml b/ci/data.yml index ce97cb5be9..271c74bfba 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,5 +1,9 @@ #@data/values --- +source: + repository: "http://github.com/pivotal-jbarrett/geode-native.git" + branch: "wip/images-gcp" + default: build: name: #@ None diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 9a288252dd..0f05246fec 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -2,317 +2,19 @@ #! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml #! kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main -#@ load("remote.lib.txt", -#@ "remote_shell", -#@ "remote_build_variables", -#@ "run_cpp_unit_tests", -#@ "run_cpp_integration_tests", -#@ "run_cpp_legacy_integration_tests", -#@ "run_net_unit_tests", -#@ "run_net_integration_tests", -#@ "run_net_legacy_integration_tests") +#@ load("templates.lib.yml", +#@ "gcr_image_resource", +#@ "git_resource", +#@ "gci_resource", +#@ "build_job") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") -#@ def resource(name, type, source, icon=None): -name: #@ name -type: #@ type -source: #@ source -icon: #@ icon -#@ end - -#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): -#@ return resource(name, "docker-image", { -#@ "repository": repository, -#@ "tag": tag, -#@ "username": username, -#@ "password": password -#@ }, "docker") -#@ end - -#@ def gcr_image_resource(name, repository, tag="latest"): -#@ 
return docker_image_resource(name, repository, tag, "_json_key", "((gcr-json-key))") -#@ end - -#@ def git_resource(name, uri, branch, paths=[], depth=1): -#@ return resource(name, "git", { -#@ "branch": branch, -#@ "depth": depth, -#@ "paths": paths, -#@ "uri": uri -#@ }, "github") -#@ end - -#@ def image_family_name(family): -#@ return "jbarrett-10-2-" + family -#@ end - -#@ def gci_resource_name(family): -#@ return image_family_name(family) + "-gci" -#@ end - -#@ def gci_resource(family): -#@ return resource(gci_resource_name(family), "gci", { -#@ "key": "((gcr-json-key))", -#@ "family_project": "gemfire-dev", -#@ "family": image_family_name(family), -#@ }, "google-cloud") -#@ end - -#@ def create_instance(image_resource): -task: create -image: task-image -config: - platform: linux - inputs: - - name: #@ image_resource - path: image - outputs: - - name: identity - - name: instance - params: - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - (@= remote_build_variables() @) - - BUILD_PREFIX=${BUILD_PREFIX:-build} - BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} - BUILD_SUBNET=${BUILD_SUBNET:-default} - BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} - BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-'image/name'} - - build_image=$(cat ${BUILD_IMAGE_FILE}) - - ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} - ssh_pubkey_file=${ssh_key_file}.pub - ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C ${BUILD_USER} -N '' <<< y - ssh_pubkey=$(cat ${ssh_pubkey_file}) - - ssh_keys_file=${BUILD_IDENTITY_DIR}/ssh_keys_file - echo "${BUILD_USER}:${ssh_pubkey}" > ${ssh_keys_file} - - instance_file=${BUILD_INSTANCE_DIR}/instance.json - gcloud compute instances create ${BUILD_INSTANCE} \ - --format json \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ - --subnet=${BUILD_SUBNET} \ - --machine-type=e2-standard-16 \ - --boot-disk-size=200GB \ - --boot-disk-type=pd-standard \ - 
--boot-disk-device-name=${BUILD_INSTANCE} \ - --image-project=${BUILD_IMAGE_PROJECT} \ - --image=${build_image} \ - --metadata-from-file ssh-keys=${ssh_keys_file} \ - | tee ${instance_file} - - (@=remote_shell() @) - - echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" -#@ end - -#@ def await_instance(): -task: await -image: task-image -config: - platform: linux - inputs: - - name: identity - - name: instance - params: - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - (@=remote_shell() @) - SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" - - instance_name=$(jq -r '.[0].name' ${instance_file}) - - echo "Waiting for ssh on ${instance_name} to be ready." - console_file=$(mktemp) - console_next=0 - while ! remote_shell echo ready 2>/dev/null ; do - gcloud compute instances get-serial-port-output ${instance_name} \ - --start ${console_next} \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ - --format json \ - > ${console_file} - - console_next=$(jq -r '.next' ${console_file}) - console_contents=$(jq -r '.contents' ${console_file}) - if [ ! 
-z "${console_contents}" ]; then - echo -n "${console_contents}" - fi - done - - rm -f ${console_file} -#@ end - -#@ load("@ytt:template", "template") - -#@ def build_task(config, params={}): -task: build -image: task-image -config: - platform: linux - inputs: - - name: identity - - name: instance - - name: source - outputs: - - name: package - params: - _: #@ template.replace(params) - _: #@ template.replace({"CMAKE_CONFIG": config}) - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - - (@= remote_shell() @) - - pushd source - git_url=$(git remote get-url origin) - git_rev=$(git rev-parse HEAD) - popd - - remote_shell git clone ${git_url} source - remote_shell cmake -E chdir source git checkout ${git_rev} - remote_shell cmake -E make_directory build - remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} - remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} - remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} - remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" - - #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ -#@ end - -#@ def remote_task(name, config, commands, timeout, params={}, attempts=1): -task: #@ name -timeout: #@ timeout -image: task-image -attempts: #@ attempts -config: - platform: linux - inputs: - - name: identity - - name: instance - outputs: - params: - _: #@ template.replace(params) - _: #@ template.replace({"CMAKE_CONFIG": config}) - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - (@= remote_shell() @) - (@= commands @) -#@ end - -#@ def cpp_unit_test_task(build, config): -#@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params, 5) -#@ end - -#@ def cpp_integration_test_task(build, 
config): -#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) -#@ end - -#@ def cpp_legacy_integration_test_task(build, config): -#@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params, 5) -#@ end - -#@ def net_unit_test_task(build, config): -#@ return remote_task(".net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) -#@ end - -#@ def net_integration_test_task(build, config): -#@ return remote_task(".net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) -#@ end - -#@ def net_legacy_integration_test_task(build, config): -#@ return remote_task(".net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) -#@ end - -#@ def delete_instance(): -task: delete -image: task-image -config: - platform: linux - inputs: - - name: identity - - name: instance - params: - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - (@= remote_shell() @) - - instance_name=$(jq -r '.[0].name' ${instance_file}) - - gcloud compute instances delete ${instance_name} \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ - --delete-disks=all \ - --quiet -#@ end - -#@ def build_job(build, config): -name: #@ "build-" + build.name + "-" + config.name -plan: - - in_parallel: - fail_fast: true - steps: - - get: task-image - trigger: true - - get: source - trigger: true - - get: #@ gci_resource_name(build.image_family) - trigger: true - - do: - - #@ create_instance(gci_resource_name(build.image_family)) - - #@ await_instance() - - #@ build_task(config.config, build.params) - - #@ cpp_unit_test_task(build, config) - - #@ cpp_integration_test_task(build, config) - - #@ cpp_legacy_integration_test_task(build, config) - #@ if build.with_dot_net: - - #@ net_unit_test_task(build, config) - - #@ net_integration_test_task(build, config) 
- - #@ net_legacy_integration_test_task(build, config) - #@ end - ensure: #@ delete_instance() -#@ end - #@ def extends_build(build): #@ return struct.make(**overlay.apply(data.values.default.build, build)) #@ end - -#! TODO data -#@ geode_native_uri = "http://github.com/pivotal-jbarrett/geode-native.git" -#@ geode_native_branch = "wip/images-gcp" - resource_types: - name: gci type: docker-image @@ -321,7 +23,7 @@ resource_types: resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - - #@ git_resource("source", geode_native_uri, geode_native_branch) + - #@ git_resource("source", data.values.source.repository, data.values.source.branch) #@ for/end build in data.values.builds: - #@ gci_resource(extends_build(build).image_family) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml new file mode 100644 index 0000000000..6f61a53912 --- /dev/null +++ b/ci/templates.lib.yml @@ -0,0 +1,303 @@ +#! helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 +#! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml +#! 
kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main + +#@ load("remote.lib.txt", +#@ "remote_shell", +#@ "remote_build_variables", +#@ "run_cpp_unit_tests", +#@ "run_cpp_integration_tests", +#@ "run_cpp_legacy_integration_tests", +#@ "run_net_unit_tests", +#@ "run_net_integration_tests", +#@ "run_net_legacy_integration_tests") + +#@ def resource(name, type, source, icon=None): +name: #@ name +type: #@ type +source: #@ source +icon: #@ icon +#@ end + +#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): +#@ return resource(name, "docker-image", { +#@ "repository": repository, +#@ "tag": tag, +#@ "username": username, +#@ "password": password +#@ }, "docker") +#@ end + +#@ def gcr_image_resource(name, repository, tag="latest"): +#@ return docker_image_resource(name, repository, tag, "_json_key", "((gcr-json-key))") +#@ end + +#@ def git_resource(name, uri, branch, paths=[], depth=1): +#@ return resource(name, "git", { +#@ "branch": branch, +#@ "depth": depth, +#@ "paths": paths, +#@ "uri": uri +#@ }, "github") +#@ end + +#@ def image_family_name(family): +#@ return "jbarrett-10-2-" + family +#@ end + +#@ def gci_resource_name(family): +#@ return image_family_name(family) + "-gci" +#@ end + +#@ def gci_resource(family): +#@ return resource(gci_resource_name(family), "gci", { +#@ "key": "((gcr-json-key))", +#@ "family_project": "gemfire-dev", +#@ "family": image_family_name(family), +#@ }, "google-cloud") +#@ end + +#@ def create_instance(image_resource): +task: create +image: task-image +config: + platform: linux + inputs: + - name: #@ image_resource + path: image + outputs: + - name: identity + - name: instance + params: + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@= remote_build_variables() @) + + BUILD_PREFIX=${BUILD_PREFIX:-build} + 
BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} + BUILD_SUBNET=${BUILD_SUBNET:-default} + BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} + BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-'image/name'} + + build_image=$(cat ${BUILD_IMAGE_FILE}) + + ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} + ssh_pubkey_file=${ssh_key_file}.pub + ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C ${BUILD_USER} -N '' <<< y + ssh_pubkey=$(cat ${ssh_pubkey_file}) + + ssh_keys_file=${BUILD_IDENTITY_DIR}/ssh_keys_file + echo "${BUILD_USER}:${ssh_pubkey}" > ${ssh_keys_file} + + instance_file=${BUILD_INSTANCE_DIR}/instance.json + gcloud compute instances create ${BUILD_INSTANCE} \ + --format json \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --subnet=${BUILD_SUBNET} \ + --machine-type=e2-standard-16 \ + --boot-disk-size=200GB \ + --boot-disk-type=pd-standard \ + --boot-disk-device-name=${BUILD_INSTANCE} \ + --image-project=${BUILD_IMAGE_PROJECT} \ + --image=${build_image} \ + --metadata-from-file ssh-keys=${ssh_keys_file} \ + | tee ${instance_file} + + (@=remote_shell() @) + + echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" +#@ end + +#@ def await_instance(): +task: await +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + params: + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@=remote_shell() @) + SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" + + instance_name=$(jq -r '.[0].name' ${instance_file}) + + echo "Waiting for ssh on ${instance_name} to be ready." + console_file=$(mktemp) + console_next=0 + while ! 
remote_shell echo ready 2>/dev/null ; do + gcloud compute instances get-serial-port-output ${instance_name} \ + --start ${console_next} \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --format json \ + > ${console_file} + + console_next=$(jq -r '.next' ${console_file}) + console_contents=$(jq -r '.contents' ${console_file}) + if [ ! -z "${console_contents}" ]; then + echo -n "${console_contents}" + fi + done + + rm -f ${console_file} +#@ end + +#@ load("@ytt:template", "template") + +#@ def build_task(config, params={}): +task: build +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + - name: source + outputs: + - name: package + params: + _: #@ template.replace(params) + _: #@ template.replace({"CMAKE_CONFIG": config}) + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + + (@= remote_shell() @) + + pushd source + git_url=$(git remote get-url origin) + git_rev=$(git rev-parse HEAD) + popd + + remote_shell git clone ${git_url} source + remote_shell cmake -E chdir source git checkout ${git_rev} + remote_shell cmake -E make_directory build + remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} + remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} + remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} + remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" + + #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ +#@ end + +#@ def remote_task(name, config, commands, timeout, params={}, attempts=1): +task: #@ name +timeout: #@ timeout +image: task-image +attempts: #@ attempts +config: + platform: linux + inputs: + - name: identity + - name: instance + outputs: + params: + _: #@ template.replace(params) + _: #@ template.replace({"CMAKE_CONFIG": 
config}) + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@= remote_shell() @) + (@= commands @) +#@ end + +#@ def cpp_unit_test_task(build, config): +#@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params, 5) +#@ end + +#@ def cpp_integration_test_task(build, config): +#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) +#@ end + +#@ def cpp_legacy_integration_test_task(build, config): +#@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params, 5) +#@ end + +#@ def net_unit_test_task(build, config): +#@ return remote_task(".net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) +#@ end + +#@ def net_integration_test_task(build, config): +#@ return remote_task(".net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) +#@ end + +#@ def net_legacy_integration_test_task(build, config): +#@ return remote_task(".net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) +#@ end + +#@ def delete_instance(): +task: delete +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + params: + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@= remote_shell() @) + + instance_name=$(jq -r '.[0].name' ${instance_file}) + + gcloud compute instances delete ${instance_name} \ + --project=${BUILD_PROJECT} \ + --zone=${BUILD_ZONE} \ + --delete-disks=all \ + --quiet +#@ end + +#@ def build_job(build, config): +name: #@ "build-" + build.name + "-" + config.name +plan: + - in_parallel: + fail_fast: true + steps: + - get: task-image + trigger: true + - get: source + trigger: true + - get: #@ gci_resource_name(build.image_family) + trigger: true + - do: + - #@ 
create_instance(gci_resource_name(build.image_family)) + - #@ await_instance() + - #@ build_task(config.config, build.params) + - #@ cpp_unit_test_task(build, config) + - #@ cpp_integration_test_task(build, config) + - #@ cpp_legacy_integration_test_task(build, config) + #@ if build.with_dot_net: + - #@ net_unit_test_task(build, config) + - #@ net_integration_test_task(build, config) + - #@ net_legacy_integration_test_task(build, config) + #@ end + ensure: #@ delete_instance() +#@ end + From ed34dd8545b273c23bcab73e215da0fa3bdc8a5b Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 16:40:17 -0700 Subject: [PATCH 030/155] Improve parallel tasks --- ci/templates.lib.yml | 46 ++++++++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 6f61a53912..4f79e0c727 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -277,27 +277,31 @@ config: #@ def build_job(build, config): name: #@ "build-" + build.name + "-" + config.name plan: - - in_parallel: - fail_fast: true - steps: - - get: task-image - trigger: true - - get: source - trigger: true - - get: #@ gci_resource_name(build.image_family) - trigger: true - do: - - #@ create_instance(gci_resource_name(build.image_family)) - - #@ await_instance() - - #@ build_task(config.config, build.params) - - #@ cpp_unit_test_task(build, config) - - #@ cpp_integration_test_task(build, config) - - #@ cpp_legacy_integration_test_task(build, config) - #@ if build.with_dot_net: - - #@ net_unit_test_task(build, config) - - #@ net_integration_test_task(build, config) - - #@ net_legacy_integration_test_task(build, config) - #@ end + - in_parallel: + fail_fast: true + steps: + - get: source + trigger: true + - do: + - in_parallel: + fail_fast: true + steps: + - get: task-image + trigger: true + - get: #@ gci_resource_name(build.image_family) + trigger: true + - #@ create_instance(gci_resource_name(build.image_family)) + - 
#@ await_instance() + - do: + - #@ build_task(config.config, build.params) + - #@ cpp_unit_test_task(build, config) + - #@ cpp_integration_test_task(build, config) + - #@ cpp_legacy_integration_test_task(build, config) + #@ if build.with_dot_net: + - #@ net_unit_test_task(build, config) + - #@ net_integration_test_task(build, config) + - #@ net_legacy_integration_test_task(build, config) + #@ end ensure: #@ delete_instance() #@ end - From 6127569cbd20d3b4eb829120886a96f11cdacf55 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 18:09:05 -0700 Subject: [PATCH 031/155] Fixes legacy timebomb --- cppcache/integration-test/test.bat.in | 2 +- cppcache/integration-test/test.sh.in | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cppcache/integration-test/test.bat.in b/cppcache/integration-test/test.bat.in index 096d783474..e3f00afb06 100644 --- a/cppcache/integration-test/test.bat.in +++ b/cppcache/integration-test/test.bat.in @@ -32,7 +32,7 @@ set GFE_LOGLEVEL=config set GFE_SECLOGLEVEL=config set MCAST_ADDR=224.10.13.63 set MCAST_PORT=${PORT} -set TIMEBOMB=3600 +set TIMEBOMB=500 set GF_CLASSPATH=%GF_CLASSPATH%;${CMAKE_BINARY_DIR}/tests/javaobject/javaobject.jar set PROFILERCMD= set BUG481= diff --git a/cppcache/integration-test/test.sh.in b/cppcache/integration-test/test.sh.in index 8eb6368642..0b3cfbfa2e 100644 --- a/cppcache/integration-test/test.sh.in +++ b/cppcache/integration-test/test.sh.in @@ -33,7 +33,7 @@ export GFE_LOGLEVEL=config export GFE_SECLOGLEVEL=config export MCAST_ADDR=224.10.13.63 export MCAST_PORT=${PORT} -export TIMEBOMB=0 +export TIMEBOMB=500 export GF_CLASSPATH=$GF_CLASSPATH:${CMAKE_BINARY_DIR}/tests/javaobject/javaobject.jar export PROFILERCMD= export BUG481= From 671dd0f730811037d26407bf75aef9ed820eea0d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 18:48:15 -0700 Subject: [PATCH 032/155] Download build directory on failure. 
--- ci/remote.lib.txt | 5 +++ ci/templates.lib.yml | 79 ++++++++++++++++++++++++++++---------------- 2 files changed, 56 insertions(+), 28 deletions(-) diff --git a/ci/remote.lib.txt b/ci/remote.lib.txt index e83fb92c21..c9705b8bee 100644 --- a/ci/remote.lib.txt +++ b/ci/remote.lib.txt @@ -38,6 +38,11 @@ external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instanc function remote_shell { ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip} "$@" } + +function remote_copy_recursive { + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${BUILD_USER}@${external_ip}:${1}" "$2" +} + (@- end @) (@ def run_cpp_unit_tests(): -@) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 4f79e0c727..de89784121 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -224,11 +224,11 @@ config: #@ end #@ def cpp_unit_test_task(build, config): -#@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params, 5) +#@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params) #@ end #@ def cpp_integration_test_task(build, config): -#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) +#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params) #@ end #@ def cpp_legacy_integration_test_task(build, config): @@ -247,6 +247,29 @@ config: #@ return remote_task(".net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) #@ end +#@ def download_build_task(): +task: download-build +image: task-image +config: + platform: linux + inputs: + - name: identity + - name: instance + outputs: + - name: build + params: + run: + path: bash + args: + - -xc + #@yaml/text-templated-strings + - | + set -ueo pipefail + (@= remote_shell() @) + + remote_copy_recursive build . 
+#@ end + #@ def delete_instance(): task: delete image: task-image @@ -277,31 +300,31 @@ config: #@ def build_job(build, config): name: #@ "build-" + build.name + "-" + config.name plan: + - in_parallel: + fail_fast: true + steps: + - get: source + trigger: true + - do: + - in_parallel: + fail_fast: true + steps: + - get: task-image + trigger: true + - get: #@ gci_resource_name(build.image_family) + trigger: true + - #@ create_instance(gci_resource_name(build.image_family)) + - #@ await_instance() - do: - - in_parallel: - fail_fast: true - steps: - - get: source - trigger: true - - do: - - in_parallel: - fail_fast: true - steps: - - get: task-image - trigger: true - - get: #@ gci_resource_name(build.image_family) - trigger: true - - #@ create_instance(gci_resource_name(build.image_family)) - - #@ await_instance() - - do: - - #@ build_task(config.config, build.params) - - #@ cpp_unit_test_task(build, config) - - #@ cpp_integration_test_task(build, config) - - #@ cpp_legacy_integration_test_task(build, config) - #@ if build.with_dot_net: - - #@ net_unit_test_task(build, config) - - #@ net_integration_test_task(build, config) - - #@ net_legacy_integration_test_task(build, config) - #@ end - ensure: #@ delete_instance() + - #@ build_task(config.config, build.params) + - #@ cpp_unit_test_task(build, config) + - #@ cpp_integration_test_task(build, config) + - #@ cpp_legacy_integration_test_task(build, config) + #@ if build.with_dot_net: + - #@ net_unit_test_task(build, config) + - #@ net_integration_test_task(build, config) + - #@ net_legacy_integration_test_task(build, config) + #@ end +on_failure: #@ download_build_task() +ensure: #@ delete_instance() #@ end From 4d6e88b5ae4485b4d84d2dc310484691ab2d40c4 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 19:39:28 -0700 Subject: [PATCH 033/155] Default to null attempts --- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml 
index de89784121..f0382fd314 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -198,7 +198,7 @@ config: #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ #@ end -#@ def remote_task(name, config, commands, timeout, params={}, attempts=1): +#@ def remote_task(name, config, commands, timeout, params={}, attempts=None): task: #@ name timeout: #@ timeout image: task-image From fe4d738077141daa7871d77a3ac5e70782de3e25 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 7 Jul 2020 20:03:36 -0700 Subject: [PATCH 034/155] Update attempts --- ci/README.md | 3 +-- ci/templates.lib.yml | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/ci/README.md b/ci/README.md index 0d6ae5e803..18b8f833c2 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,3 +1,2 @@ ```console -ytt -f pipeline.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml -``` +ytt -f pipeline.yml -f templates.lib.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml``` diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index f0382fd314..3cec02ec06 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -228,7 +228,7 @@ config: #@ end #@ def cpp_integration_test_task(build, config): -#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params) +#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) #@ end #@ def cpp_legacy_integration_test_task(build, config): From b14e2e78151f8918fbf8dfe7ff02267e846d45f8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 8 Jul 2020 23:02:45 -0700 Subject: [PATCH 035/155] Build images --- ci/README.md | 9 ++- ci/data.yml | 8 ++- ci/pipeline.yml | 20 ++++--- ci/templates.lib.yml | 56 +++++++++++++++++-- packer/build-rhel-7.json | 19 ++++--- 
packer/build-rhel-8.json | 19 ++++--- ...ntu-16.04.json => build-ubuntu-16-04.json} | 19 ++++--- ...ntu-18.04.json => build-ubuntu-18-04.json} | 19 ++++--- packer/build-windows-2016-vs-2017.json | 19 ++++--- packer/build-windows-2019-vs-2017.json | 19 ++++--- 10 files changed, 143 insertions(+), 64 deletions(-) rename packer/{build-ubuntu-16.04.json => build-ubuntu-16-04.json} (85%) rename packer/{build-ubuntu-18.04.json => build-ubuntu-18-04.json} (85%) diff --git a/ci/README.md b/ci/README.md index 18b8f833c2..95276abb4e 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,2 +1,9 @@ +# Pipeline ```console -ytt -f pipeline.yml -f templates.lib.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml``` +ytt -f pipeline.yml -f templates.lib.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml +``` + +#TODO +* helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 +* ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml +* kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main diff --git a/ci/data.yml b/ci/data.yml index 271c74bfba..d2ab994318 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,8 +1,10 @@ #@data/values --- -source: - repository: "http://github.com/pivotal-jbarrett/geode-native.git" - branch: "wip/images-gcp" +repository: + base: http://github.com/ + owner: pivotal-jbarrett + project: geode-native + branch: wip/images-gcp default: build: diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 0f05246fec..3372109039 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -6,7 +6,10 @@ #@ "gcr_image_resource", #@ "git_resource", #@ "gci_resource", -#@ "build_job") +#@ "docker_image_resource", +#@ "build_job", +#@ "packer_job", +#@ "repository_url") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", 
"overlay") #@ load("@ytt:struct", "struct") @@ -22,13 +25,16 @@ resource_types: repository: smgoller/gci-resource resources: - - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - - #@ git_resource("source", data.values.source.repository, data.values.source.branch) - #@ for/end build in data.values.builds: - - #@ gci_resource(extends_build(build).image_family) + #! - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") + - #@ docker_image_resource("packer-image", "hashicorp/packer") + #! - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch) + - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) + #! #@ for/end build in data.values.builds: + #! - #@ gci_resource(extends_build(build).image_family) jobs: #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job(extends_build(build), config) + #! #@ for/end config in data.values.configs: + #! - #@ build_job(extends_build(build), config) + - #@ packer_job(extends_build(build)) #@ end diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 3cec02ec06..b01c26861c 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -1,6 +1,4 @@ -#! helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 -#! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml -#! 
kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main +#@ load("@ytt:data", "data") #@ load("remote.lib.txt", #@ "remote_shell", @@ -12,7 +10,11 @@ #@ "run_net_integration_tests", #@ "run_net_legacy_integration_tests") -#@ def resource(name, type, source, icon=None): +#@ def repository_url(repository): +#@ return repository.base + repository.owner + "/" + repository.project + ".git" +#@ end + +#@ def resource(name, type, source=None, icon=None): name: #@ name type: #@ type source: #@ source @@ -41,8 +43,12 @@ icon: #@ icon #@ }, "github") #@ end +#@ def hyphenated(value): +#@ return value.replace(".", "-").replace("/", "-") +#@ end + #@ def image_family_name(family): -#@ return "jbarrett-10-2-" + family +#@ return data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) + "-" + family #@ end #@ def gci_resource_name(family): @@ -223,6 +229,10 @@ config: (@= commands @) #@ end +#@ def packer_build_task(build): +#@ end + + #@ def cpp_unit_test_task(build, config): #@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params) #@ end @@ -328,3 +338,39 @@ plan: on_failure: #@ download_build_task() ensure: #@ delete_instance() #@ end + +#@ def packer_job(build): +name: #@ "packer-" + build.image_family +plan: + - in_parallel: + fail_fast: true + steps: + - get: packer-image + - get: packer-source + - task: build + image: packer-image + config: + platform: linux + inputs: + - name: packer-source + path: source + params: + run: + path: bash + args: + - -xc + #@yaml/text-templated-strings + - | + set -ueo pipefail + env + cd source/packer + packer build -only=googlecompute \ + -var-file=default.json \ + -var owner=(@= data.values.repository.owner @) \ + -var product_name=(@= data.values.repository.project @) \ + -var version=(@= data.values.repository.branch @) \ + -var image_name_prefix=(@= 
image_family_name(build.image_family)[0:51] @) \ + -var image_family=(@= image_family_name(build.image_family)[0:62] @) + (@= build.image_family @).json + +#@ end diff --git a/packer/build-rhel-7.json b/packer/build-rhel-7.json index 657631daa3..92a5da0e39 100644 --- a/packer/build-rhel-7.json +++ b/packer/build-rhel-7.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-rhel-7" + "image_family": "build-rhel-7", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "ec2-user", "ssh_pty": true @@ -38,11 +40,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "rhel-7", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "disk_size": "20", "ssh_username": "packer" diff --git a/packer/build-rhel-8.json b/packer/build-rhel-8.json index a7c108d183..251304978c 100644 --- a/packer/build-rhel-8.json +++ 
b/packer/build-rhel-8.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-rhel-8" + "image_family": "build-rhel-8", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "ec2-user", "ssh_pty": true @@ -38,11 +40,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "rhel-8", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "disk_size": "20", "ssh_username": "packer" diff --git a/packer/build-ubuntu-16.04.json b/packer/build-ubuntu-16-04.json similarity index 85% rename from packer/build-ubuntu-16.04.json rename to packer/build-ubuntu-16-04.json index c5dcb18dfb..c7605b134a 100644 --- a/packer/build-ubuntu-16.04.json +++ b/packer/build-ubuntu-16-04.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-ubuntu-16-04" + "image_family": 
"build-ubuntu-16-04", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "ubuntu", "ssh_pty": true @@ -38,11 +40,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "ubuntu-1604-lts", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "packer" } diff --git a/packer/build-ubuntu-18.04.json b/packer/build-ubuntu-18-04.json similarity index 85% rename from packer/build-ubuntu-18.04.json rename to packer/build-ubuntu-18-04.json index 90198d6ddd..756547b6ff 100644 --- a/packer/build-ubuntu-18.04.json +++ b/packer/build-ubuntu-18-04.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-ubuntu-18-04" + "image_family": "build-ubuntu-18-04", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "t2.micro", "ami_virtualization_type": "hvm", - 
"ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "ubuntu", "ssh_pty": true @@ -38,11 +40,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "ubuntu-1804-lts", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "ssh_username": "packer" } diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index e5c69539cd..2c5abc8f6c 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-windows-2016-vs-2017" + "image_family": "build-windows-2016-vs-2017", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "c5d.2xlarge", "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ 
"subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "launch_block_device_mappings": [ { @@ -48,11 +50,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "windows-2016", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "disk_size": "100", "metadata": { diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json index ded4742aa1..4b81d74384 100644 --- a/packer/build-windows-2019-vs-2017.json +++ b/packer/build-windows-2019-vs-2017.json @@ -3,14 +3,15 @@ "aws_region": "", "googlecompute_zone": "", "googlecompute_project": "", - "image_name": "build-windows-2019-vs-2017" + "image_family": "build-windows-2019-vs-2017", + "image_name_prefix": "{{user `image_family`}}" }, "builders": [ { "type": "amazon-ebs", "instance_type": "c5d.2xlarge", "ami_virtualization_type": "hvm", - "ami_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", + "ami_name": "{{user `image_family`}}-{{timestamp}}", "region": "{{user `aws_region`}}", "source_ami_filter": { "filters": { @@ -26,8 +27,9 @@ "subnet_id": "{{user `subnet_id`}}", "vpc_id": "{{user `vpc_id`}}", "tags": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user 
`branch`}}" }, "launch_block_device_mappings": [ { @@ -48,11 +50,12 @@ "project_id": "{{user `googlecompute_project`}}", "zone": "{{user `googlecompute_zone`}}", "source_image_family": "windows-2019", - "image_name": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}-{{timestamp}}", - "image_family": "{{user `product_name`}}-{{user `version`}}-{{user `image_name`}}", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", "image_labels": { - "product": "{{user `product_name`}}", - "version": "{{user `version`}}" + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" }, "disk_size": "100", "metadata": { From 6b40e6493b64fe7016442d17b2823ae04af48133 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 9 Jul 2020 06:56:30 -0700 Subject: [PATCH 036/155] Fixes some image build issues --- ci/pipeline.yml | 12 ++++++------ ci/templates.lib.yml | 23 +++++++++++++++-------- packer/default.json | 1 - 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 3372109039..c8585c7ddf 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -25,16 +25,16 @@ resource_types: repository: smgoller/gci-resource resources: - #! - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") + - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ docker_image_resource("packer-image", "hashicorp/packer") - #! - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch) + - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) - #! #@ for/end build in data.values.builds: - #! 
- #@ gci_resource(extends_build(build).image_family) + #@ for/end build in data.values.builds: + - #@ gci_resource(extends_build(build).image_family) jobs: #@ for build in data.values.builds: - #! #@ for/end config in data.values.configs: - #! - #@ build_job(extends_build(build), config) + #@ for/end config in data.values.configs: + - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index b01c26861c..ceb72a2fe8 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -34,21 +34,23 @@ icon: #@ icon #@ return docker_image_resource(name, repository, tag, "_json_key", "((gcr-json-key))") #@ end -#@ def git_resource(name, uri, branch, paths=[], depth=1): +#@ def git_resource(name, uri, branch, paths=[], ignore_paths=[], depth=1): #@ return resource(name, "git", { #@ "branch": branch, #@ "depth": depth, #@ "paths": paths, +#@ "ignore_paths": ignore_paths, #@ "uri": uri #@ }, "github") #@ end #@ def hyphenated(value): +#! TODO [a-z0-9-] #@ return value.replace(".", "-").replace("/", "-") #@ end #@ def image_family_name(family): -#@ return data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) + "-" + family +#@ return (data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) + "-" + family)[0:62] #@ end #@ def gci_resource_name(family): @@ -339,6 +341,11 @@ on_failure: #@ download_build_task() ensure: #@ delete_instance() #@ end +#@ def gci_label_value(value): +#! 
TODO [a-z0-9_-] +#@ return hyphenated(value[0:62]) +#@ end + #@ def packer_job(build): name: #@ "packer-" + build.image_family plan: @@ -347,6 +354,7 @@ plan: steps: - get: packer-image - get: packer-source + trigger: false - task: build image: packer-image config: @@ -358,19 +366,18 @@ plan: run: path: bash args: - - -xc + - -c #@yaml/text-templated-strings - | set -ueo pipefail - env cd source/packer packer build -only=googlecompute \ -var-file=default.json \ - -var owner=(@= data.values.repository.owner @) \ - -var product_name=(@= data.values.repository.project @) \ - -var version=(@= data.values.repository.branch @) \ + -var owner=(@= gci_label_value(data.values.repository.owner) @) \ + -var project=(@= gci_label_value(data.values.repository.project) @) \ + -var branch=(@= gci_label_value(data.values.repository.branch) @) \ -var image_name_prefix=(@= image_family_name(build.image_family)[0:51] @) \ - -var image_family=(@= image_family_name(build.image_family)[0:62] @) + -var image_family=(@= image_family_name(build.image_family) @) \ (@= build.image_family @).json #@ end diff --git a/packer/default.json b/packer/default.json index d02af97e3a..e37b87415e 100644 --- a/packer/default.json +++ b/packer/default.json @@ -1,5 +1,4 @@ { - "product_name": "native", "region": "us-west-2", "aws_region": "us-west-2", "googlecompute_project": "gemfire-dev", From fd9308d5acd1316f4d56772e5eedae1a907e0e2a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 9 Jul 2020 23:19:34 -0700 Subject: [PATCH 037/155] Updates readme --- ci/README.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ci/README.md b/ci/README.md index 95276abb4e..d504ccab47 100644 --- a/ci/README.md +++ b/ci/README.md @@ -3,6 +3,14 @@ ytt -f pipeline.yml -f templates.lib.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml ``` +# Steps +1. Creates instances +2. Waits for instance to be accessible +3. Builds and packages +4. Runs all tests +5. 
If anything fails it downloads the build directory for later analysis +6. Deletes the instance + #TODO * helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 * ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml From 56c02276bb65052cd76e9e32b7924c05900a5e2e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 9 Jul 2020 23:19:55 -0700 Subject: [PATCH 038/155] Triggers images on changes to source image. --- ci/data.yml | 6 ++++++ ci/pipeline.yml | 7 +++++-- ci/set-pipeline.sh | 9 +++++++++ ci/{remote.lib.txt => templates.lib.txt} | 0 ci/templates.lib.yml | 21 +++++++++++++-------- 5 files changed, 33 insertions(+), 10 deletions(-) create mode 100755 ci/set-pipeline.sh rename ci/{remote.lib.txt => templates.lib.txt} (100%) diff --git a/ci/data.yml b/ci/data.yml index d2ab994318..e053485735 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -10,6 +10,7 @@ default: build: name: #@ None image_family: #@ None + source_image_family: #@ None with_dot_net: #@ False params: CMAKE_CONFIGURE_FLAGS: @@ -19,6 +20,7 @@ default: builds: - name: windows image_family: build-windows-2019-vs-2017 + source_image_family: windows-2019 with_dot_net: #@ True params: CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" @@ -26,12 +28,16 @@ builds: CPACK_GENERATORS: "ZIP" - name: rhel-7 image_family: build-rhel-7 + source_image_family: rhel-7 - name: rhel-8 image_family: build-rhel-8 + source_image_family: rhel-8 - name: ubuntu-16.04 image_family: build-ubuntu-16-04 + source_image_family: ubuntu-1604-lts - name: ubuntu-18.04 image_family: build-ubuntu-18-04 + source_image_family: ubuntu-1804-lts configs: - name: debug diff --git a/ci/pipeline.yml b/ci/pipeline.yml index c8585c7ddf..acd6c70c3a 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -7,6 +7,7 @@ #@ "git_resource", #@ "gci_resource", #@ "docker_image_resource", +#@ "image_family_name", #@ "build_job", #@ "packer_job", #@ "repository_url") @@ 
-29,8 +30,10 @@ resources: - #@ docker_image_resource("packer-image", "hashicorp/packer") - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) - #@ for/end build in data.values.builds: - - #@ gci_resource(extends_build(build).image_family) + #@ for build in data.values.builds: + - #@ gci_resource(image_family_name(extends_build(build).image_family), "gemfire-dev") + - #@ gci_resource(extends_build(build).source_image_family, "gemfire-dev") + #@ end jobs: #@ for build in data.values.builds: diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh new file mode 100755 index 0000000000..f8b9426de3 --- /dev/null +++ b/ci/set-pipeline.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +YTT=${YTT:-ytt} +FLY=${FLY:-fly} + +${YTT} -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml > output.yml +${FLY} -t test set-pipeline -p test -c output.yml diff --git a/ci/remote.lib.txt b/ci/templates.lib.txt similarity index 100% rename from ci/remote.lib.txt rename to ci/templates.lib.txt diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index ceb72a2fe8..2801f0e7b3 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -1,6 +1,6 @@ #@ load("@ytt:data", "data") -#@ load("remote.lib.txt", +#@ load("templates.lib.txt", #@ "remote_shell", #@ "remote_build_variables", #@ "run_cpp_unit_tests", @@ -54,14 +54,14 @@ icon: #@ icon #@ end #@ def gci_resource_name(family): -#@ return image_family_name(family) + "-gci" +#@ return family + "-gci" #@ end -#@ def gci_resource(family): +#@ def gci_resource(family, project=None): #@ return resource(gci_resource_name(family), "gci", { #@ "key": "((gcr-json-key))", -#@ "family_project": "gemfire-dev", -#@ "family": image_family_name(family), +#@ "family_project": project, +#@ "family": family, #@ }, 
"google-cloud") #@ end @@ -102,6 +102,8 @@ config: ssh_keys_file=${BUILD_IDENTITY_DIR}/ssh_keys_file echo "${BUILD_USER}:${ssh_pubkey}" > ${ssh_keys_file} + time_to_live=$(( $(date +%s) + ( 4 * 60 * 60 ) )) + instance_file=${BUILD_INSTANCE_DIR}/instance.json gcloud compute instances create ${BUILD_INSTANCE} \ --format json \ @@ -115,6 +117,7 @@ config: --image-project=${BUILD_IMAGE_PROJECT} \ --image=${build_image} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ + --labels=time-to-live=${time_to_live} | tee ${instance_file} (@=remote_shell() @) @@ -323,9 +326,9 @@ plan: steps: - get: task-image trigger: true - - get: #@ gci_resource_name(build.image_family) + - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true - - #@ create_instance(gci_resource_name(build.image_family)) + - #@ create_instance(gci_resource_name(image_family_name(build.image_family))) - #@ await_instance() - do: - #@ build_task(config.config, build.params) @@ -353,8 +356,10 @@ plan: fail_fast: true steps: - get: packer-image + - get: #@ gci_resource_name(build.source_image_family) + trigger: true - get: packer-source - trigger: false + trigger: true - task: build image: packer-image config: From 700be2d0e24887715ffbc9195ce451e871d0210e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 9 Jul 2020 23:49:32 -0700 Subject: [PATCH 039/155] Adds labels to instances. --- ci/README.md | 7 +++++-- ci/templates.lib.txt | 2 +- ci/templates.lib.yml | 18 +++++++++++------- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/ci/README.md b/ci/README.md index d504ccab47..50d240be4a 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,7 +1,10 @@ -# Pipeline +# Publish Pipeline ```console -ytt -f pipeline.yml -f templates.lib.yml -f remote.lib.txt -f data.yml> output.yml && fly -t test set-pipeline -p test -c output.yml +./set-pipeline.sh ``` +Produces output.yml side effect. +Set `FLY=/path/to/fly` to select version of fly. + # Steps 1. 
Creates instances diff --git a/ci/templates.lib.txt b/ci/templates.lib.txt index c9705b8bee..6bae58b7cf 100644 --- a/ci/templates.lib.txt +++ b/ci/templates.lib.txt @@ -58,7 +58,7 @@ remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CON (@- end @) (@ def run_cpp_legacy_integration_tests(): -@) -set -x + remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 2801f0e7b3..25f8fb8b2b 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -46,7 +46,7 @@ icon: #@ icon #@ def hyphenated(value): #! TODO [a-z0-9-] -#@ return value.replace(".", "-").replace("/", "-") +#@ return value.lower().replace(".", "-").replace("/", "-") #@ end #@ def image_family_name(family): @@ -65,18 +65,22 @@ icon: #@ icon #@ }, "google-cloud") #@ end -#@ def create_instance(image_resource): +#@ def create_instance(build, config): task: create image: task-image config: platform: linux inputs: - - name: #@ image_resource + - name: #@ gci_resource_name(image_family_name(build.image_family)) path: image outputs: - name: identity - name: instance params: + LABEL_REPOSITORY_OWNER: #@ gci_label_value(data.values.repository.owner) + LABEL_REPOSITORY_PROJECT: #@ gci_label_value(data.values.repository.project) + LABEL_REPOSITORY_BRANCH: #@ gci_label_value(data.values.repository.branch) + LABEL_BUILD_CONFIG: #@ config.name run: path: bash args: @@ -117,7 +121,7 @@ config: --image-project=${BUILD_IMAGE_PROJECT} \ --image=${build_image} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ - --labels=time-to-live=${time_to_live} + 
--labels=time-to-live=${time_to_live},repository-owner=${LABEL_REPOSITORY_OWNER},repository-project=${LABEL_REPOSITORY_PROJECT},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ | tee ${instance_file} (@=remote_shell() @) @@ -276,7 +280,7 @@ config: run: path: bash args: - - -xc + - -c #@yaml/text-templated-strings - | set -ueo pipefail @@ -328,7 +332,7 @@ plan: trigger: true - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true - - #@ create_instance(gci_resource_name(image_family_name(build.image_family))) + - #@ create_instance(build, config) - #@ await_instance() - do: - #@ build_task(config.config, build.params) @@ -359,7 +363,7 @@ plan: - get: #@ gci_resource_name(build.source_image_family) trigger: true - get: packer-source - trigger: true + trigger: false - task: build image: packer-image config: From b49f2864641d148d303f6679a998768202945346 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 00:24:45 -0700 Subject: [PATCH 040/155] Self update pipeline --- ci/pipeline.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index acd6c70c3a..cc66476e4c 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -28,7 +28,9 @@ resource_types: resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ docker_image_resource("packer-image", "hashicorp/packer") + - #@ docker_image_resource("ytt-image", "gerritk/ytt") - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) + - #@ git_resource("ci-source", repository_url(data.values.repository), data.values.repository.branch, ["ci/*"]) - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) #@ for build in data.values.builds: - #@ gci_resource(image_family_name(extends_build(build).image_family), 
"gemfire-dev") @@ -41,3 +43,31 @@ jobs: - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end + - name: set-pipeline + serial: true + plan: + - in_parallel: + fail_fast: true + steps: + - get: ytt-image + - get: ci-source + trigger: true + - task: ytt + image: ytt-image + config: + platform: linux + inputs: + - name: ci-source + path: source + outputs: + - name: pipeline + params: + run: + path: /usr/bin/ytt + args: + - --file + - source/ci/. + - --output-files + - pipeline/ + - set_pipeline: test + file: pipeline/pipeline.yml From 4dcf9bc2878df0ffa329224b675f38a95f3d2dea Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 00:35:15 -0700 Subject: [PATCH 041/155] Move update pipeline job --- ci/pipeline.yml | 57 ++++++++++++++++++++++---------------------- ci/templates.lib.yml | 32 ++++++++++++++++++++++++- 2 files changed, 60 insertions(+), 29 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index cc66476e4c..0889cc5ea1 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -10,6 +10,7 @@ #@ "image_family_name", #@ "build_job", #@ "packer_job", +#@ "update_pipeline_job", #@ "repository_url") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") @@ -43,31 +44,31 @@ jobs: - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end - - name: set-pipeline - serial: true - plan: - - in_parallel: - fail_fast: true - steps: - - get: ytt-image - - get: ci-source - trigger: true - - task: ytt - image: ytt-image - config: - platform: linux - inputs: - - name: ci-source - path: source - outputs: - - name: pipeline - params: - run: - path: /usr/bin/ytt - args: - - --file - - source/ci/. 
- - --output-files - - pipeline/ - - set_pipeline: test - file: pipeline/pipeline.yml + - #@ update_pipeline_job() + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 25f8fb8b2b..8d15af926b 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -329,7 +329,6 @@ plan: fail_fast: true steps: - get: task-image - trigger: true - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true - #@ create_instance(build, config) @@ -390,3 +389,34 @@ plan: (@= build.image_family @).json #@ end + +#@ def update_pipeline_job(): +name: update-pipeline +serial: true +plan: + - in_parallel: + fail_fast: true + steps: + - get: ytt-image + - get: ci-source + trigger: true + - task: ytt + image: ytt-image + config: + platform: linux + inputs: + - name: ci-source + path: source + outputs: + - name: pipeline + params: + run: + path: /usr/bin/ytt + args: + - --file + - source/ci/. + - --output-files + - pipeline/ + - set_pipeline: test + file: pipeline/pipeline.yml +#@ end From 6d73fe08429c183dfaccf64ade98c3a45df99d80 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 08:23:16 -0700 Subject: [PATCH 042/155] Reduce complexity Use variables for project --- ci/data.yml | 4 +++ ci/pipeline.yml | 35 ++------------------------ ci/templates.lib.txt | 39 +++++++++++++++-------------- ci/templates.lib.yml | 59 +++++++++++++++++++++----------------------- 4 files changed, 54 insertions(+), 83 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index e053485735..b317d3aff9 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,5 +1,9 @@ #@data/values --- +google: + project: gemfire-dev + zone: us-central1-f + repository: base: http://github.com/ owner: pivotal-jbarrett diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 0889cc5ea1..26b6820ed0 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -1,7 +1,3 @@ -#! 
helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 -#! ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml -#! kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main - #@ load("templates.lib.yml", #@ "gcr_image_resource", #@ "git_resource", @@ -34,8 +30,8 @@ resources: - #@ git_resource("ci-source", repository_url(data.values.repository), data.values.repository.branch, ["ci/*"]) - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) #@ for build in data.values.builds: - - #@ gci_resource(image_family_name(extends_build(build).image_family), "gemfire-dev") - - #@ gci_resource(extends_build(build).source_image_family, "gemfire-dev") + - #@ gci_resource(image_family_name(extends_build(build).image_family)) + - #@ gci_resource(extends_build(build).source_image_family) #@ end jobs: @@ -45,30 +41,3 @@ jobs: - #@ packer_job(extends_build(build)) #@ end - #@ update_pipeline_job() - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/ci/templates.lib.txt b/ci/templates.lib.txt index 6bae58b7cf..2701c719e2 100644 --- a/ci/templates.lib.txt +++ b/ci/templates.lib.txt @@ -1,33 +1,34 @@ -(@ def remote_build_variables(): -@) -BUILD_USER=${BUILD_USER:-build} -BUILD_PROJECT=${BUILD_PROJECT:-gemfire-dev} -BUILD_ZONE=${BUILD_ZONE:-us-central1-f} -BUILD_IDENTITY_DIR=${BUILD_IDENTITY_DIR:-$(pwd)/identity} -BUILD_INSTANCE_DIR=${BUILD_INSTANCE_DIR:-$(pwd)/instance} - -if [ ! -d "${BUILD_IDENTITY_DIR}" ]; then - echo "${BUILD_IDENTITY_DIR} not found." 
- exit 1 -fi +(@ load("@ytt:data", "data") -@) + + +(@ def google_variables(): -@) +GOOGLE_PROJECT=(@=data.values.google.project @) +GOOGLE_ZONE=(@=data.values.google.zone @) +(@- end @) + + +(@ def instance_variables(): -@) +INSTANCE_USER=build +INSTANCE_DIR=$(pwd)/instance -if [ ! -d "${BUILD_INSTANCE_DIR}" ]; then - echo "${BUILD_INSTANCE_DIR} not found." +if [ ! -d "${INSTANCE_DIR}" ]; then + echo "${INSTANCE_DIR} not found." exit 1 fi (@- end @) -(@ def remote_shell(): -@) -(@=remote_build_variables() @) +(@ def remote_functions(): -@) +(@=instance_variables() @) SSH_OPTIONS=${SSH_OPTIONS:-"-o StrictHostKeyChecking=no -o PasswordAuthentication=no"} -ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} +ssh_key_file=${INSTANCE_DIR}/identity if [ ! -r "${ssh_key_file}" ]; then echo "${ssh_key_file} not readable." exit 1 fi -instance_file=${BUILD_INSTANCE_DIR}/instance.json +instance_file=${INSTANCE_DIR}/instance.json if [ ! -r "${instance_file}" ]; then echo "${instance_file} not readable." 
exit 1 @@ -36,11 +37,11 @@ fi external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) function remote_shell { - ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip} "$@" + ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} "$@" } function remote_copy_recursive { - scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${BUILD_USER}@${external_ip}:${1}" "$2" + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${INSTANCE_USER}@${external_ip}:${1}" "$2" } (@- end @) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 8d15af926b..0d26d07abe 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -1,8 +1,9 @@ #@ load("@ytt:data", "data") #@ load("templates.lib.txt", -#@ "remote_shell", -#@ "remote_build_variables", +#@ "remote_functions", +#@ "instance_variables", +#@ "google_variables", #@ "run_cpp_unit_tests", #@ "run_cpp_integration_tests", #@ "run_cpp_legacy_integration_tests", @@ -57,7 +58,7 @@ icon: #@ icon #@ return family + "-gci" #@ end -#@ def gci_resource(family, project=None): +#@ def gci_resource(family, project=data.values.google.project): #@ return resource(gci_resource_name(family), "gci", { #@ "key": "((gcr-json-key))", #@ "family_project": project, @@ -74,7 +75,6 @@ config: - name: #@ gci_resource_name(image_family_name(build.image_family)) path: image outputs: - - name: identity - name: instance params: LABEL_REPOSITORY_OWNER: #@ gci_label_value(data.values.repository.owner) @@ -88,45 +88,45 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@= remote_build_variables() @) + (@= google_variables() @) + (@= instance_variables() @) BUILD_PREFIX=${BUILD_PREFIX:-build} BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} BUILD_SUBNET=${BUILD_SUBNET:-default} - BUILD_IMAGE_PROJECT=${BUILD_IMAGE_PROJECT:-${BUILD_PROJECT}} BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-'image/name'} build_image=$(cat ${BUILD_IMAGE_FILE}) - 
ssh_key_file=${BUILD_IDENTITY_DIR}/${BUILD_USER} + ssh_key_file=${INSTANCE_DIR}/identity ssh_pubkey_file=${ssh_key_file}.pub - ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C ${BUILD_USER} -N '' <<< y + ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C "${INSTANCE_USER}" -N '' <<< y ssh_pubkey=$(cat ${ssh_pubkey_file}) - ssh_keys_file=${BUILD_IDENTITY_DIR}/ssh_keys_file - echo "${BUILD_USER}:${ssh_pubkey}" > ${ssh_keys_file} + ssh_keys_file=${INSTANCE_DIR}/ssh_keys_file + echo "${INSTANCE_USER}:${ssh_pubkey}" > ${ssh_keys_file} time_to_live=$(( $(date +%s) + ( 4 * 60 * 60 ) )) - instance_file=${BUILD_INSTANCE_DIR}/instance.json + instance_file=${INSTANCE_DIR}/instance.json gcloud compute instances create ${BUILD_INSTANCE} \ --format json \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ + --project=${GOOGLE_PROJECT} \ + --zone=${GOOGLE_ZONE} \ --subnet=${BUILD_SUBNET} \ --machine-type=e2-standard-16 \ --boot-disk-size=200GB \ --boot-disk-type=pd-standard \ --boot-disk-device-name=${BUILD_INSTANCE} \ - --image-project=${BUILD_IMAGE_PROJECT} \ + --image-project=${GOOGLE_PROJECT} \ --image=${build_image} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ --labels=time-to-live=${time_to_live},repository-owner=${LABEL_REPOSITORY_OWNER},repository-project=${LABEL_REPOSITORY_PROJECT},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ | tee ${instance_file} - (@=remote_shell() @) + (@=remote_functions() @) - echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${BUILD_USER}@${external_ip}" + echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip}" #@ end #@ def await_instance(): @@ -135,7 +135,6 @@ image: task-image config: platform: linux inputs: - - name: identity - name: instance params: run: @@ -145,7 +144,8 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@=remote_shell() @) + (@=google_variables() @) + (@=remote_functions() @) SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" instance_name=$(jq -r 
'.[0].name' ${instance_file}) @@ -156,8 +156,8 @@ config: while ! remote_shell echo ready 2>/dev/null ; do gcloud compute instances get-serial-port-output ${instance_name} \ --start ${console_next} \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ + --project=${GOOGLE_PROJECT} \ + --zone=${GOOGLE_ZONE} \ --format json \ > ${console_file} @@ -179,7 +179,6 @@ image: task-image config: platform: linux inputs: - - name: identity - name: instance - name: source outputs: @@ -195,7 +194,7 @@ config: - | set -ueo pipefail - (@= remote_shell() @) + (@= remote_functions() @) pushd source git_url=$(git remote get-url origin) @@ -210,7 +209,7 @@ config: remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" - #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${BUILD_USER}@${external_ip}:build/*.zip package/ + #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip}:build/*.zip package/ #@ end #@ def remote_task(name, config, commands, timeout, params={}, attempts=None): @@ -221,7 +220,6 @@ attempts: #@ attempts config: platform: linux inputs: - - name: identity - name: instance outputs: params: @@ -234,7 +232,7 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@= remote_shell() @) + (@= remote_functions() @) (@= commands @) #@ end @@ -272,7 +270,6 @@ image: task-image config: platform: linux inputs: - - name: identity - name: instance outputs: - name: build @@ -284,7 +281,7 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@= remote_shell() @) + (@= remote_functions() @) remote_copy_recursive build . 
#@ end @@ -295,7 +292,6 @@ image: task-image config: platform: linux inputs: - - name: identity - name: instance params: run: @@ -305,13 +301,14 @@ config: #@yaml/text-templated-strings - | set -ueo pipefail - (@= remote_shell() @) + (@= google_variables() @) + (@= remote_functions() @) instance_name=$(jq -r '.[0].name' ${instance_file}) gcloud compute instances delete ${instance_name} \ - --project=${BUILD_PROJECT} \ - --zone=${BUILD_ZONE} \ + --project=${GOOGLE_PROJECT} \ + --zone=${GOOGLE_ZONE} \ --delete-disks=all \ --quiet #@ end From 067b2b74352f1b8730679afdae891eb6fe8906ec Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 08:27:57 -0700 Subject: [PATCH 043/155] Merge await into create --- ci/templates.lib.yml | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 0d26d07abe..1f03d4d375 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -126,30 +126,9 @@ config: (@=remote_functions() @) - echo "ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip}" -#@ end - -#@ def await_instance(): -task: await -image: task-image -config: - platform: linux - inputs: - - name: instance - params: - run: - path: bash - args: - - -c - #@yaml/text-templated-strings - - | - set -ueo pipefail - (@=google_variables() @) - (@=remote_functions() @) SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" instance_name=$(jq -r '.[0].name' ${instance_file}) - echo "Waiting for ssh on ${instance_name} to be ready." 
console_file=$(mktemp) console_next=0 @@ -167,8 +146,6 @@ config: echo -n "${console_contents}" fi done - - rm -f ${console_file} #@ end #@ load("@ytt:template", "template") @@ -329,7 +306,6 @@ plan: - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true - #@ create_instance(build, config) - - #@ await_instance() - do: - #@ build_task(config.config, build.params) - #@ cpp_unit_test_task(build, config) From e76cdfbf24e6f068f87c49532053a9a86e372ec9 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 08:41:16 -0700 Subject: [PATCH 044/155] Merge await into create --- ci/templates.lib.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 1f03d4d375..14571d5971 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -125,7 +125,6 @@ config: | tee ${instance_file} (@=remote_functions() @) - SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" instance_name=$(jq -r '.[0].name' ${instance_file}) @@ -141,7 +140,7 @@ config: > ${console_file} console_next=$(jq -r '.next' ${console_file}) - console_contents=$(jq -r '.contents' ${console_file}) + console_contents=$(jq -r '.contents' ${console_file} | sed 's/\x1b\[[0-9;]*[a-zA-Z]//g') if [ ! 
-z "${console_contents}" ]; then echo -n "${console_contents}" fi From c29d9a844058a129e5a58c851937fdb6aa9d8754 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 09:17:59 -0700 Subject: [PATCH 045/155] Download cpack output to package output --- ci/templates.lib.txt | 4 ++++ ci/templates.lib.yml | 11 +++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/ci/templates.lib.txt b/ci/templates.lib.txt index 2701c719e2..09f7b98a9a 100644 --- a/ci/templates.lib.txt +++ b/ci/templates.lib.txt @@ -40,6 +40,10 @@ function remote_shell { ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} "$@" } +function remote_copy { + scp ${SSH_OPTIONS} -i ${ssh_key_file} "${INSTANCE_USER}@${external_ip}:${1}" "$2" +} + function remote_copy_recursive { scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${INSTANCE_USER}@${external_ip}:${1}" "$2" } diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 14571d5971..12d181ac7d 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -183,9 +183,16 @@ config: remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} - remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" + remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" | tee cpack.out - #scp -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip}:build/*.zip package/ + packages=$(awk '/^CPack: - package: / {print $4}' cpack.out) + for package in ${packages}; do + remote_copy ${package} package/ + done + checksums=$(awk '/^CPack: - checksum file: / {print $5}' cpack.out) + for checksum in ${checksums}; do + remote_copy ${checksum} package/ + done #@ end #@ def remote_task(name, config, 
commands, timeout, params={}, attempts=None): From c7792f344957f0cbca9f127c31371065a34e5cf1 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 10 Jul 2020 20:41:05 -0700 Subject: [PATCH 046/155] Cleanup --- ci/templates.lib.yml | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 12d181ac7d..49ffdcc58d 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -91,13 +91,6 @@ config: (@= google_variables() @) (@= instance_variables() @) - BUILD_PREFIX=${BUILD_PREFIX:-build} - BUILD_INSTANCE=${BUILD_INSTANCE:-"${BUILD_PREFIX}-$(uuidgen|tr '[:upper:]' '[:lower:]')"} - BUILD_SUBNET=${BUILD_SUBNET:-default} - BUILD_IMAGE_FILE=${BUILD_IMAGE_FILE:-'image/name'} - - build_image=$(cat ${BUILD_IMAGE_FILE}) - ssh_key_file=${INSTANCE_DIR}/identity ssh_pubkey_file=${ssh_key_file}.pub ssh-keygen -m pem -t rsa -f ${ssh_key_file} -C "${INSTANCE_USER}" -N '' <<< y @@ -106,20 +99,22 @@ config: ssh_keys_file=${INSTANCE_DIR}/ssh_keys_file echo "${INSTANCE_USER}:${ssh_pubkey}" > ${ssh_keys_file} + instance_name=build-$(uuidgen|tr '[:upper:]' '[:lower:]') + image_name=$(cat image/name) time_to_live=$(( $(date +%s) + ( 4 * 60 * 60 ) )) instance_file=${INSTANCE_DIR}/instance.json - gcloud compute instances create ${BUILD_INSTANCE} \ + gcloud compute instances create ${instance_name} \ --format json \ --project=${GOOGLE_PROJECT} \ --zone=${GOOGLE_ZONE} \ - --subnet=${BUILD_SUBNET} \ + --subnet=default \ --machine-type=e2-standard-16 \ --boot-disk-size=200GB \ --boot-disk-type=pd-standard \ - --boot-disk-device-name=${BUILD_INSTANCE} \ + --boot-disk-device-name=${instance_name} \ --image-project=${GOOGLE_PROJECT} \ - --image=${build_image} \ + --image=${image_name} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ 
--labels=time-to-live=${time_to_live},repository-owner=${LABEL_REPOSITORY_OWNER},repository-project=${LABEL_REPOSITORY_PROJECT},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ | tee ${instance_file} @@ -127,7 +122,6 @@ config: (@=remote_functions() @) SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" - instance_name=$(jq -r '.[0].name' ${instance_file}) echo "Waiting for ssh on ${instance_name} to be ready." console_file=$(mktemp) console_next=0 From 1a05a559365e2f9dfd6d60ab1d7c810f37e39ea5 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 30 Jul 2020 14:48:38 -0700 Subject: [PATCH 047/155] WIP self hosting pipeline --- ci/set-pipeline.sh | 13 +++++++++++-- ci/templates.lib.yml | 8 ++++++-- ci/values.yml | 4 ++++ 3 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 ci/values.yml diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index f8b9426de3..523cbbd110 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -5,5 +5,14 @@ set -xeuo pipefail YTT=${YTT:-ytt} FLY=${FLY:-fly} -${YTT} -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml > output.yml -${FLY} -t test set-pipeline -p test -c output.yml +git_branch=$(git rev-parse --abbrev-ref HEAD) +git_tracking_branch=$(git for-each-ref --format='%(upstream:short)' $(git symbolic-ref -q HEAD)) +git_remote=$(echo ${git_tracking_branch} | cut -d/ -f1) +git_remote_url=$(git remote get-url ${git_remote}) + +#git_remote_url=$(git remote get-url origin) +git_remote_url="http://github.com/pivotal-jbarrett/geode-native.git" +git_owner=$(echo ${git_remote_url} | cut -d/ -f1) + +#${YTT} -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml > output.yml +#${FLY} -t test set-pipeline -p test -c output.yml diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 49ffdcc58d..37f55ed445 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -50,8 +50,12 @@ icon: #@ icon #@ return value.lower().replace(".", "-").replace("/", 
"-") #@ end +#@ def pipeline_prefix(): +#@ return data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) +#@ end + #@ def image_family_name(family): -#@ return (data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) + "-" + family)[0:62] +#@ return (pipeline_prefix() + "-" + family)[0:62] #@ end #@ def gci_resource_name(family): @@ -390,6 +394,6 @@ plan: - source/ci/. - --output-files - pipeline/ - - set_pipeline: test + - set_pipeline: #@ pipeline_prefix() file: pipeline/pipeline.yml #@ end diff --git a/ci/values.yml b/ci/values.yml new file mode 100644 index 0000000000..50a629fb29 --- /dev/null +++ b/ci/values.yml @@ -0,0 +1,4 @@ +#@ load("@ytt:data", "data") +#@ load("@ytt:template", "template") + +--- #@ template.replace([data.read("data.yml")]) From 8690affa58166f4eeec586a51abae4658682809a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 26 Nov 2020 04:30:34 -0800 Subject: [PATCH 048/155] Change github URL to https. --- ci/data.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/data.yml b/ci/data.yml index b317d3aff9..372e42e02d 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -5,7 +5,7 @@ google: zone: us-central1-f repository: - base: http://github.com/ + base: https://github.com/ owner: pivotal-jbarrett project: geode-native branch: wip/images-gcp From f71e71e8fb3f74fe02a93efc5663d11998fd8680 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 26 Nov 2020 06:25:24 -0800 Subject: [PATCH 049/155] Fixes do attempts. 
--- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 37f55ed445..9b5e82306d 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -193,7 +193,7 @@ config: done #@ end -#@ def remote_task(name, config, commands, timeout, params={}, attempts=None): +#@ def remote_task(name, config, commands, timeout, params={}, attempts=1): task: #@ name timeout: #@ timeout image: task-image From bc872b3ca802c52258a34c461702104a5ab0d143 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 26 Nov 2020 07:57:47 -0800 Subject: [PATCH 050/155] Fix sysprep hang. --- packer/build-windows-2019-vs-2017.json | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/packer/build-windows-2019-vs-2017.json b/packer/build-windows-2019-vs-2017.json index 4b81d74384..7ef2682585 100644 --- a/packer/build-windows-2019-vs-2017.json +++ b/packer/build-windows-2019-vs-2017.json @@ -134,6 +134,16 @@ "amazon-ebs" ] }, + { + "type": "windows-restart", + "restart_timeout": "30m" + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" + ] + }, { "type": "powershell", "inline": [ @@ -142,12 +152,6 @@ "only": [ "googlecompute" ] - }, - { - "type": "powershell", - "scripts": [ - "windows/cleanup.ps1" - ] } ] } From 5fe818b0c3960e78ca01adda254f088ade8d1a99 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 27 Nov 2020 13:24:54 -0800 Subject: [PATCH 051/155] Cleanup and license. 
--- ci/README.md | 49 +++++++++++++++++++++----- ci/data.yml | 19 ++++++++-- ci/pipeline.yml | 15 ++++++++ ci/set-pipeline.sh | 84 ++++++++++++++++++++++++++++++++++++++------ ci/templates.lib.txt | 15 ++++++++ ci/templates.lib.yml | 17 ++++++++- ci/values.yml | 15 ++++++++ 7 files changed, 192 insertions(+), 22 deletions(-) diff --git a/ci/README.md b/ci/README.md index 50d240be4a..01b4226537 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,20 +1,53 @@ # Publish Pipeline ```console -./set-pipeline.sh +./set-pipeline.sh --help ``` -Produces output.yml side effect. -Set `FLY=/path/to/fly` to select version of fly. +## Example +Given the local repository looks like the following. +```console +$ git remote get-url origin +git@github.com:some-user/geode-native.git + +$ git branch --show-current +wip/some-branch +``` + +### Typical +The most typical usage should require nothing more than the Concourse target, unless that happens to be named "default". +Everything else has reasonable defaults based on the currently checked out branch. +```console +$ ./set-pipeline.sh --target=test +``` +Executes `fly` from the path setting pipeline to target `test` for remote repository `git@github.com:some-user/geode-native.git`. +Pipeline name will be `some-user-wip-something` -# Steps -1. Creates instances -2. Waits for instance to be accessible +### Alternative repository URL and fly version +Sometimes you will have to support multiple versions of Concourse `fly` or need to fetch sources via https. +```console +$ ./set-pipeline.sh \ + --fly=/path/to/fly \ + --target=test \ + --repository=https://github.com/some-user/geode-native.git +``` +Executes fly at `/path/to/fly` setting pipeline to target `test` for remote repository `https://github.com/some-user/geode-native.git`. +Pipeline name will be `some-user-wip-something` + +# Pipeline Steps +1. Creates VM instances +2. Waits for VM instance to be accessible 3. Builds and packages 4. Runs all tests 5. 
If anything fails it downloads the build directory for later analysis -6. Deletes the instance +6. Deletes the VM instances + +# Details +This Coucourse pipeline is rendered using [`ytt`](https://get-ytt.io). +## Dependencies +* [Concourse](https://concourse-ci.org) v6.5.0+ +* [`ytt`](https://get-ytt.io) v0.28.0+ #TODO +## Concourse Installation * helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 -* ~/Downloads/fly -t test set-pipeline -p test -c ../../ci/pipeline.yml * kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main diff --git a/ci/data.yml b/ci/data.yml index 372e42e02d..2399f5da12 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -1,3 +1,18 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. 
+ #@data/values --- google: @@ -6,9 +21,9 @@ google: repository: base: https://github.com/ - owner: pivotal-jbarrett + owner: apache project: geode-native - branch: wip/images-gcp + branch: develop default: build: diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 26b6820ed0..2fdcedd97b 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -12,6 +12,21 @@ #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. + #@ def extends_build(build): #@ return struct.make(**overlay.apply(data.values.default.build, build)) #@ end diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index 523cbbd110..8e369efc9a 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -1,18 +1,80 @@ #!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. set -xeuo pipefail -YTT=${YTT:-ytt} -FLY=${FLY:-fly} +function printHelp() { + cat << EOF +$0 Usage: +Sets Concourse pipeline for Geode Native builds. -git_branch=$(git rev-parse --abbrev-ref HEAD) -git_tracking_branch=$(git for-each-ref --format='%(upstream:short)' $(git symbolic-ref -q HEAD)) -git_remote=$(echo ${git_tracking_branch} | cut -d/ -f1) -git_remote_url=$(git remote get-url ${git_remote}) +Options: +Environment Variable Parameter Description Default +target --target Fly target. "default" +branch --branch Branch to build. Current checked out branch. +repository --repository Remote URL for repository. Current tracking branch repository. +pipeline --pipeline Name of pipeline to set. Based on repository owner name and branch. +fly --fly Path to fly executable. "fly" +ytt --ytt Path to ytt executable. "ytt" +output --output Rendered pipeline file. Temporary file. 
+EOF +} -#git_remote_url=$(git remote get-url origin) -git_remote_url="http://github.com/pivotal-jbarrett/geode-native.git" -git_owner=$(echo ${git_remote_url} | cut -d/ -f1) +while [ $# -gt 0 ]; do + if [[ $1 == "--help" ]]; then + printHelp; + exit 0; + elif [[ $1 == *"--"*"="* ]]; then + param="${1%%=*}" + param="${param#--}" + declare ${param//[^[:word:]]/_}="${1#--*=}" + elif [[ $1 == *"--"* ]]; then + param="${1/--/}" + declare ${param//[^[:word:]]/_}="${2}" + shift + fi + shift +done -#${YTT} -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml > output.yml -#${FLY} -t test set-pipeline -p test -c output.yml +ytt=${ytt:-ytt} +fly=${fly:-fly} + +target=${target:-default} +output=${output:-$(mktemp)} + +branch=${branch:-$(git rev-parse --abbrev-ref HEAD)} +git_tracking_branch=${git_tracking_branch:-$(git for-each-ref --format='%(upstream:short)' $(git symbolic-ref -q HEAD))} +git_remote=${git_remote:-$(echo ${git_tracking_branch} | cut -d/ -f1)} +repository=${repository:-$(git remote get-url ${git_remote})} + +if [[ ${repository} =~ ^((https|git)(:\/\/|@)([^\/:]+)[\/:])([^\/:]+)\/(.+).git$ ]]; then + git_base=${BASH_REMATCH[1]} + git_owner=${BASH_REMATCH[5]} + git_project=${BASH_REMATCH[6]} +fi + +bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml \ + --data-value repository.base=${git_base} \ + --data-value repository.owner=${git_owner} \ + --data-value repository.project=${git_project} \ + --data-value repository.branch=${branch} \ + > ${output} + +pipeline=${pipeline:-${git_owner}-${branch}} +pipeline=${pipeline//[^[:word:]-]/-} + +bash -c "${fly} \$@" fly --target=${target} \ + set-pipeline --pipeline=${pipeline} --config=${output} diff --git a/ci/templates.lib.txt b/ci/templates.lib.txt index 09f7b98a9a..0ab8105dee 100644 --- a/ci/templates.lib.txt +++ b/ci/templates.lib.txt @@ -1,3 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + (@ load("@ytt:data", "data") -@) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 9b5e82306d..0c1106ae7a 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -1,3 +1,18 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. + #@ load("@ytt:data", "data") #@ load("templates.lib.txt", @@ -394,6 +409,6 @@ plan: - source/ci/. 
- --output-files - pipeline/ - - set_pipeline: #@ pipeline_prefix() + - set_pipeline: self file: pipeline/pipeline.yml #@ end diff --git a/ci/values.yml b/ci/values.yml index 50a629fb29..8cbc408fca 100644 --- a/ci/values.yml +++ b/ci/values.yml @@ -1,3 +1,18 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. 
+ #@ load("@ytt:data", "data") #@ load("@ytt:template", "template") From ef283ff24dd759949b81d13e9f6348133940723d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 27 Nov 2020 13:38:37 -0800 Subject: [PATCH 052/155] Fixes set_pipeline --- ci/pipeline.yml | 2 +- ci/templates.lib.yml | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 2fdcedd97b..f91fad7a26 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -55,4 +55,4 @@ jobs: - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end - - #@ update_pipeline_job() + - #@ update_pipeline_job(data.values.repository) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 0c1106ae7a..dfbe52b216 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -382,7 +382,7 @@ plan: #@ end -#@ def update_pipeline_job(): +#@ def update_pipeline_job(repository): name: update-pipeline serial: true plan: @@ -408,6 +408,10 @@ plan: - --file - source/ci/. - --output-files + - #@ "--data-value repository.base=" + repository.base + - #@ "--data-value repository.owner=" + repository.owner + - #@ "--data-value repository.project=" + repository.project + - #@ "--data-value repository.branch=" + repository.branch - pipeline/ - set_pipeline: self file: pipeline/pipeline.yml From 38d8c7035ae67afca6355a2c3358cbfb0166897a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 27 Nov 2020 13:40:52 -0800 Subject: [PATCH 053/155] Fixes set_pipeline --- ci/templates.lib.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index dfbe52b216..11563dbbd1 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -408,10 +408,14 @@ plan: - --file - source/ci/. 
- --output-files - - #@ "--data-value repository.base=" + repository.base - - #@ "--data-value repository.owner=" + repository.owner - - #@ "--data-value repository.project=" + repository.project - - #@ "--data-value repository.branch=" + repository.branch + - --data-value + - #@ "repository.base=" + repository.base + - --data-value + - #@ "repository.owner=" + repository.owner + - --data-value + - #@ "repository.project=" + repository.project + - --data-value + - #@ "repository.branch=" + repository.branch - pipeline/ - set_pipeline: self file: pipeline/pipeline.yml From c12976b1d46ed320130a417030594b9afd5c6a03 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 27 Nov 2020 13:44:07 -0800 Subject: [PATCH 054/155] Fixes set_pipeline --- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 11563dbbd1..13ecce2e4a 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -407,7 +407,6 @@ plan: args: - --file - source/ci/. - - --output-files - --data-value - #@ "repository.base=" + repository.base - --data-value @@ -416,6 +415,7 @@ plan: - #@ "repository.project=" + repository.project - --data-value - #@ "repository.branch=" + repository.branch + - --output-files - pipeline/ - set_pipeline: self file: pipeline/pipeline.yml From 512fa6c6a34ec4e02e0625781eba012433f1f03f Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 06:24:50 -0800 Subject: [PATCH 055/155] Cleanup redundant data values. 
--- ci/data.yml | 12 ++++++++---- ci/pipeline.yml | 11 +++++------ ci/set-pipeline.sh | 11 +++++------ ci/templates.lib.yml | 30 ++++++++++-------------------- 4 files changed, 28 insertions(+), 36 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index 2399f5da12..d77e20153e 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -15,15 +15,19 @@ #@data/values --- -google: - project: gemfire-dev - zone: us-central1-f +pipeline: + name: develop repository: + url: https://github.com/apache/geode-native + branch: develop base: https://github.com/ owner: apache project: geode-native - branch: develop + +google: + project: gemfire-dev + zone: us-central1-f default: build: diff --git a/ci/pipeline.yml b/ci/pipeline.yml index f91fad7a26..7a088eed71 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -6,8 +6,7 @@ #@ "image_family_name", #@ "build_job", #@ "packer_job", -#@ "update_pipeline_job", -#@ "repository_url") +#@ "update_pipeline_job") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") @@ -41,9 +40,9 @@ resources: - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ docker_image_resource("packer-image", "hashicorp/packer") - #@ docker_image_resource("ytt-image", "gerritk/ytt") - - #@ git_resource("source", repository_url(data.values.repository), data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - - #@ git_resource("ci-source", repository_url(data.values.repository), data.values.repository.branch, ["ci/*"]) - - #@ git_resource("packer-source", repository_url(data.values.repository), data.values.repository.branch, ["packer/*"]) + - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) + - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) + - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) #@ for build 
in data.values.builds: - #@ gci_resource(image_family_name(extends_build(build).image_family)) - #@ gci_resource(extends_build(build).source_image_family) @@ -55,4 +54,4 @@ jobs: - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end - - #@ update_pipeline_job(data.values.repository) + - #@ update_pipeline_job() diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index 8e369efc9a..1206fc12c6 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -66,15 +66,14 @@ if [[ ${repository} =~ ^((https|git)(:\/\/|@)([^\/:]+)[\/:])([^\/:]+)\/(.+).git$ git_project=${BASH_REMATCH[6]} fi +pipeline=${pipeline:-${git_owner}-${branch}} +pipeline=${pipeline//[^[:word:]-]/-} + bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml \ - --data-value repository.base=${git_base} \ - --data-value repository.owner=${git_owner} \ - --data-value repository.project=${git_project} \ + --data-value pipeline.name=${pipeline} \ + --data-value repository.url=${repository} \ --data-value repository.branch=${branch} \ > ${output} -pipeline=${pipeline:-${git_owner}-${branch}} -pipeline=${pipeline//[^[:word:]-]/-} - bash -c "${fly} \$@" fly --target=${target} \ set-pipeline --pipeline=${pipeline} --config=${output} diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 13ecce2e4a..c48b120670 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -26,10 +26,6 @@ #@ "run_net_integration_tests", #@ "run_net_legacy_integration_tests") -#@ def repository_url(repository): -#@ return repository.base + repository.owner + "/" + repository.project + ".git" -#@ end - #@ def resource(name, type, source=None, icon=None): name: #@ name type: #@ type @@ -65,12 +61,8 @@ icon: #@ icon #@ return value.lower().replace(".", "-").replace("/", "-") #@ end -#@ def pipeline_prefix(): -#@ return data.values.repository.owner + "-" + hyphenated(data.values.repository.branch) -#@ end - #@ def image_family_name(family): -#@ return 
(pipeline_prefix() + "-" + family)[0:62] +#@ return (data.values.pipeline.name + "-" + family)[0:62] #@ end #@ def gci_resource_name(family): @@ -96,8 +88,8 @@ config: outputs: - name: instance params: - LABEL_REPOSITORY_OWNER: #@ gci_label_value(data.values.repository.owner) - LABEL_REPOSITORY_PROJECT: #@ gci_label_value(data.values.repository.project) + LABEL_PIPELINE_NAME: #@ gci_label_value(data.values.pipeline.name) + LABEL_REPOSITORY_URL: #@ gci_label_value(data.values.repository.url) LABEL_REPOSITORY_BRANCH: #@ gci_label_value(data.values.repository.branch) LABEL_BUILD_CONFIG: #@ config.name run: @@ -135,7 +127,7 @@ config: --image-project=${GOOGLE_PROJECT} \ --image=${image_name} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ - --labels=time-to-live=${time_to_live},repository-owner=${LABEL_REPOSITORY_OWNER},repository-project=${LABEL_REPOSITORY_PROJECT},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ + --labels=time-to-live=${time_to_live},pipeline-name=${LABEL_PIPELINE_NAME},repository-branch=${LABEL_REPOSITORY_URL},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ | tee ${instance_file} (@=remote_functions() @) @@ -373,8 +365,8 @@ plan: cd source/packer packer build -only=googlecompute \ -var-file=default.json \ - -var owner=(@= gci_label_value(data.values.repository.owner) @) \ - -var project=(@= gci_label_value(data.values.repository.project) @) \ + -var pipeline=(@= gci_label_value(data.values.pipeline.name) @) \ + -var repository=(@= gci_label_value(data.values.repository.url) @) \ -var branch=(@= gci_label_value(data.values.repository.branch) @) \ -var image_name_prefix=(@= image_family_name(build.image_family)[0:51] @) \ -var image_family=(@= image_family_name(build.image_family) @) \ @@ -382,7 +374,7 @@ plan: #@ end -#@ def update_pipeline_job(repository): +#@ def update_pipeline_job(): name: update-pipeline serial: true plan: @@ -408,13 +400,11 @@ plan: - --file - source/ci/. 
- --data-value - - #@ "repository.base=" + repository.base - - --data-value - - #@ "repository.owner=" + repository.owner + - #@ "pipeline.name=" + data.values.pipeline.name - --data-value - - #@ "repository.project=" + repository.project + - #@ "repository.url=" + data.values.repository.url - --data-value - - #@ "repository.branch=" + repository.branch + - #@ "repository.branch=" + data.values.repository.branch - --output-files - pipeline/ - set_pipeline: self From 570c525bdb26c407fe2d0e43f574f0ae48f9886a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 07:57:04 -0800 Subject: [PATCH 056/155] Fixes labels --- ci/templates.lib.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index c48b120670..6494eddede 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -89,8 +89,6 @@ config: - name: instance params: LABEL_PIPELINE_NAME: #@ gci_label_value(data.values.pipeline.name) - LABEL_REPOSITORY_URL: #@ gci_label_value(data.values.repository.url) - LABEL_REPOSITORY_BRANCH: #@ gci_label_value(data.values.repository.branch) LABEL_BUILD_CONFIG: #@ config.name run: path: bash @@ -127,7 +125,7 @@ config: --image-project=${GOOGLE_PROJECT} \ --image=${image_name} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ - --labels=time-to-live=${time_to_live},pipeline-name=${LABEL_PIPELINE_NAME},repository-branch=${LABEL_REPOSITORY_URL},repository-branch=${LABEL_REPOSITORY_BRANCH},build-config=${LABEL_BUILD_CONFIG} \ + --labels=time-to-live=${time_to_live},pipeline-name=${LABEL_PIPELINE_NAME},build-config=${LABEL_BUILD_CONFIG} \ | tee ${instance_file} (@=remote_functions() @) From ee878a77b0da0c22e6ac9abc05f6e6dc72d81b4d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 08:03:50 -0800 Subject: [PATCH 057/155] Rename .net to net to avoid deprecation warning. 
--- ci/templates.lib.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 6494eddede..e60521235b 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -239,15 +239,15 @@ config: #@ end #@ def net_unit_test_task(build, config): -#@ return remote_task(".net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) +#@ return remote_task("net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) #@ end #@ def net_integration_test_task(build, config): -#@ return remote_task(".net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) +#@ return remote_task("net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) #@ end #@ def net_legacy_integration_test_task(build, config): -#@ return remote_task(".net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) +#@ return remote_task("net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) #@ end #@ def download_build_task(): From 96ce1ded14e880cfa7afdc7c26a0884974e6f4b8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 08:40:58 -0800 Subject: [PATCH 058/155] Groups builds. --- ci/pipeline.yml | 51 +++++++++++++++++++++++++++++++++----------- ci/templates.lib.yml | 12 +++++++++-- 2 files changed, 48 insertions(+), 15 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 7a088eed71..2cd0a3fb4e 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -1,16 +1,3 @@ -#@ load("templates.lib.yml", -#@ "gcr_image_resource", -#@ "git_resource", -#@ "gci_resource", -#@ "docker_image_resource", -#@ "image_family_name", -#@ "build_job", -#@ "packer_job", -#@ "update_pipeline_job") -#@ load("@ytt:data", "data") -#@ load("@ytt:overlay", "overlay") -#@ load("@ytt:struct", "struct") - #! 
Licensed to the Apache Software Foundation (ASF) under one or more #! contributor license agreements. See the NOTICE file distributed with #! this work for additional information regarding copyright ownership. @@ -26,6 +13,21 @@ #! See the License for the specific language governing permissions and #! limitations under the License. +#@ load("templates.lib.yml", +#@ "gcr_image_resource", +#@ "git_resource", +#@ "gci_resource", +#@ "docker_image_resource", +#@ "image_family_name", +#@ "build_job", +#@ "build_job_name", +#@ "packer_job", +#@ "packer_job_name", +#@ "update_pipeline_job") +#@ load("@ytt:data", "data") +#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:struct", "struct") + #@ def extends_build(build): #@ return struct.make(**overlay.apply(data.values.default.build, build)) #@ end @@ -55,3 +57,26 @@ jobs: - #@ packer_job(extends_build(build)) #@ end - #@ update_pipeline_job() + +groups: + - name: builds + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end + - name: images + jobs: + #@ for/end build in data.values.builds: + - #@ packer_job_name(extends_build(build)) + - name: meta + jobs: + - update-pipeline + - name: all + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + - #@ packer_job_name(extends_build(build)) + #@ end + - update-pipeline diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index e60521235b..976299d7bf 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -299,8 +299,12 @@ config: --quiet #@ end +#@ def build_job_name(build, config): +#@ return "build-" + build.name + "-" + config.name +#@ end + #@ def build_job(build, config): -name: #@ "build-" + build.name + "-" + config.name +name: #@ build_job_name(build, config) plan: - in_parallel: fail_fast: true @@ -334,8 +338,12 @@ ensure: #@ delete_instance() #@ return 
hyphenated(value[0:62]) #@ end +#@ def packer_job_name(build): +#@ return "packer-" + build.image_family +#@ end + #@ def packer_job(build): -name: #@ "packer-" + build.image_family +name: #@ packer_job_name(build) plan: - in_parallel: fail_fast: true From f616274ee66089ac7c113f621f1f40b8380df0f3 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 20:00:10 -0800 Subject: [PATCH 059/155] Removes redundant fields. --- ci/data.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index d77e20153e..dbfcd18dfb 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -21,9 +21,6 @@ pipeline: repository: url: https://github.com/apache/geode-native branch: develop - base: https://github.com/ - owner: apache - project: geode-native google: project: gemfire-dev From 1644ee9238f25af0cb3d3c8ac9f99e3732790cd8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 28 Nov 2020 20:22:48 -0800 Subject: [PATCH 060/155] Override Google Cloud settings. --- ci/README.md | 4 +++- ci/data.yml | 2 +- ci/set-pipeline.sh | 23 +++++++++++++++-------- ci/templates.lib.yml | 4 ++++ 4 files changed, 23 insertions(+), 10 deletions(-) diff --git a/ci/README.md b/ci/README.md index 01b4226537..d3197a4b96 100644 --- a/ci/README.md +++ b/ci/README.md @@ -42,10 +42,12 @@ Pipeline name will be `some-user-wip-something` 6. Deletes the VM instances # Details -This Coucourse pipeline is rendered using [`ytt`](https://get-ytt.io). +This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `git` and `gcloud`. 
## Dependencies * [Concourse](https://concourse-ci.org) v6.5.0+ * [`ytt`](https://get-ytt.io) v0.28.0+ +* [`git`](https://git-scm.com) v2.25.2+ +* [`gcloud`](https://cloud.google.com/sdk/docs/install) SDK #TODO ## Concourse Installation diff --git a/ci/data.yml b/ci/data.yml index dbfcd18dfb..f943eae6f6 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -23,7 +23,7 @@ repository: branch: develop google: - project: gemfire-dev + project: apachegeode-ci zone: us-central1-f default: diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index 1206fc12c6..65751c614f 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -22,14 +22,16 @@ $0 Usage: Sets Concourse pipeline for Geode Native builds. Options: -Environment Variable Parameter Description Default -target --target Fly target. "default" -branch --branch Branch to build. Current checked out branch. -repository --repository Remote URL for repository. Current tracking branch repository. -pipeline --pipeline Name of pipeline to set. Based on repository owner name and branch. -fly --fly Path to fly executable. "fly" -ytt --ytt Path to ytt executable. "ytt" -output --output Rendered pipeline file. Temporary file. +Environment Var Parameter Description Default +target --target Fly target. "default" +branch --branch Branch to build. Current checked out branch. +repository --repository Remote URL for repository. Current tracking branch repository. +pipeline --pipeline Name of pipeline to set. Based on repository owner name and branch. +google_zone --google-zone Google Compute project. Current default project. +google_project --google-project Google Compute zone. Current default zone. +fly --fly Path to fly executable. "fly" +ytt --ytt Path to ytt executable. "ytt" +output --output Rendered pipeline file. Temporary file. 
EOF } @@ -69,10 +71,15 @@ fi pipeline=${pipeline:-${git_owner}-${branch}} pipeline=${pipeline//[^[:word:]-]/-} +google_project=${google_project:-$(gcloud config get-value project)} +google_zone=${google_zone:-$(gcloud config get-value compute/zone)} + bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml \ --data-value pipeline.name=${pipeline} \ --data-value repository.url=${repository} \ --data-value repository.branch=${branch} \ + --data-value google.project=${google_project} \ + --data-value google.zone=${google_zone} \ > ${output} bash -c "${fly} \$@" fly --target=${target} \ diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 976299d7bf..ee315ab20c 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -411,6 +411,10 @@ plan: - #@ "repository.url=" + data.values.repository.url - --data-value - #@ "repository.branch=" + data.values.repository.branch + - --data-value + - #@ "google.project=" + data.values.google.project + - --data-value + - #@ "google.zone=" + data.values.google.zone - --output-files - pipeline/ - set_pipeline: self From fe214ece351cf903eacef13a763ef04a4db407a9 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 13:08:57 -0800 Subject: [PATCH 061/155] Simplify on gcloud SDK image. Remove dependency on jq. 
--- ci/pipeline.yml | 1 + ci/templates.lib.txt | 4 ++-- ci/templates.lib.yml | 28 +++++++++++++++------------- 3 files changed, 18 insertions(+), 15 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 2cd0a3fb4e..5efa9a3223 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -39,6 +39,7 @@ resource_types: repository: smgoller/gci-resource resources: + - #@ gcr_image_resource("gcloud-image", "gcr.io/google.com/cloudsdktool/cloud-sdk", "alpine") - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") - #@ docker_image_resource("packer-image", "hashicorp/packer") - #@ docker_image_resource("ytt-image", "gerritk/ytt") diff --git a/ci/templates.lib.txt b/ci/templates.lib.txt index 0ab8105dee..1f23d3e806 100644 --- a/ci/templates.lib.txt +++ b/ci/templates.lib.txt @@ -43,13 +43,13 @@ if [ ! -r "${ssh_key_file}" ]; then exit 1 fi -instance_file=${INSTANCE_DIR}/instance.json +instance_file=${INSTANCE_DIR}/instance.sh if [ ! -r "${instance_file}" ]; then echo "${instance_file} not readable." 
exit 1 fi -external_ip=$(jq -r '.[0].networkInterfaces[0].accessConfigs[0].natIP' ${instance_file}) +external_ip=$(source ${instance_file} && echo -n ${networkInterfaces_accessConfigs_natIP}) function remote_shell { ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} "$@" diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index ee315ab20c..c6299b4554 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -79,7 +79,7 @@ icon: #@ icon #@ def create_instance(build, config): task: create -image: task-image +image: gcloud-image config: platform: linux inputs: @@ -108,13 +108,13 @@ config: ssh_keys_file=${INSTANCE_DIR}/ssh_keys_file echo "${INSTANCE_USER}:${ssh_pubkey}" > ${ssh_keys_file} - instance_name=build-$(uuidgen|tr '[:upper:]' '[:lower:]') + instance_name=build-$(cat /proc/sys/kernel/random/uuid) image_name=$(cat image/name) time_to_live=$(( $(date +%s) + ( 4 * 60 * 60 ) )) - instance_file=${INSTANCE_DIR}/instance.json + instance_file=${INSTANCE_DIR}/instance.sh gcloud compute instances create ${instance_name} \ - --format json \ + --format='config[export](name,networkInterfaces[0].accessConfigs[0].natIP)' \ --project=${GOOGLE_PROJECT} \ --zone=${GOOGLE_ZONE} \ --subnet=default \ @@ -126,9 +126,9 @@ config: --image=${image_name} \ --metadata-from-file ssh-keys=${ssh_keys_file} \ --labels=time-to-live=${time_to_live},pipeline-name=${LABEL_PIPELINE_NAME},build-config=${LABEL_BUILD_CONFIG} \ - | tee ${instance_file} + > ${instance_file} - (@=remote_functions() @) + (@= remote_functions() @) SSH_OPTIONS="${SSH_OPTIONS} -o ConnectTimeout=10" echo "Waiting for ssh on ${instance_name} to be ready." 
@@ -139,13 +139,14 @@ config: --start ${console_next} \ --project=${GOOGLE_PROJECT} \ --zone=${GOOGLE_ZONE} \ - --format json \ + --format='value[separator=" + "](next,contents)' \ > ${console_file} - console_next=$(jq -r '.next' ${console_file}) - console_contents=$(jq -r '.contents' ${console_file} | sed 's/\x1b\[[0-9;]*[a-zA-Z]//g') - if [ ! -z "${console_contents}" ]; then - echo -n "${console_contents}" + tmp_next=$(head -n 1 ${console_file}) + if (( tmp_next != console_next )); then + console_next=${tmp_next} + sed '1d;s/\x1b\[[0-9;]*J//g' fi done #@ end @@ -274,7 +275,7 @@ config: #@ def delete_instance(): task: delete -image: task-image +image: gcloud-image config: platform: linux inputs: @@ -290,7 +291,7 @@ config: (@= google_variables() @) (@= remote_functions() @) - instance_name=$(jq -r '.[0].name' ${instance_file}) + instance_name=$(source ${instance_file} && echo -n ${name}) gcloud compute instances delete ${instance_name} \ --project=${GOOGLE_PROJECT} \ @@ -315,6 +316,7 @@ plan: - in_parallel: fail_fast: true steps: + - get: gcloud-image - get: task-image - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true From 8d14c2964f8eae286c1c86edc7e0232d979785f0 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 13:23:18 -0800 Subject: [PATCH 062/155] Delete unused file. --- ci/values.yml | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 ci/values.yml diff --git a/ci/values.yml b/ci/values.yml deleted file mode 100644 index 8cbc408fca..0000000000 --- a/ci/values.yml +++ /dev/null @@ -1,19 +0,0 @@ -#! Licensed to the Apache Software Foundation (ASF) under one or more -#! contributor license agreements. See the NOTICE file distributed with -#! this work for additional information regarding copyright ownership. -#! The ASF licenses this file to You under the Apache License, Version 2.0 -#! (the "License"); you may not use this file except in compliance with -#! the License. 
You may obtain a copy of the License at -#! -#! http://www.apache.org/licenses/LICENSE-2.0 -#! -#! Unless required by applicable law or agreed to in writing, software -#! distributed under the License is distributed on an "AS IS" BASIS, -#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -#! See the License for the specific language governing permissions and -#! limitations under the License. - -#@ load("@ytt:data", "data") -#@ load("@ytt:template", "template") - ---- #@ template.replace([data.read("data.yml")]) From f5e8284890450a316a486981bb26376e355086de Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 14:06:57 -0800 Subject: [PATCH 063/155] Fixes ANSI stripping. --- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index c6299b4554..ba271be8ab 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -146,7 +146,7 @@ config: tmp_next=$(head -n 1 ${console_file}) if (( tmp_next != console_next )); then console_next=${tmp_next} - sed '1d;s/\x1b\[[0-9;]*J//g' + sed '1d;s/\x1b\[[0-9;]*[JH]//g' ${console_file} fi done #@ end From c537da02f8798604603203448118efebdcff7f24 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 15:49:13 -0800 Subject: [PATCH 064/155] Use GCR mirror. 
--- ci/templates.lib.yml | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index ba271be8ab..3370d11b81 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -27,18 +27,21 @@ #@ "run_net_legacy_integration_tests") #@ def resource(name, type, source=None, icon=None): -name: #@ name -type: #@ type -source: #@ source -icon: #@ icon +#@ return { +#@ "name": name, +#@ "type": type, +#@ "source": source, +#@ "icon": icon +#@ } #@ end -#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): +#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None, mirror="https://mirror.gcr.io"): #@ return resource(name, "docker-image", { #@ "repository": repository, #@ "tag": tag, #@ "username": username, -#@ "password": password +#@ "password": password, +#@ "registry_mirror": mirror #@ }, "docker") #@ end From ca3b831051be0fe8229a571c13bc04d75a694c4d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 16:42:49 -0800 Subject: [PATCH 065/155] Use project specific minimal task image. --- ci/docker/task/Dockerfile | 20 ++++++++++++++++++++ ci/pipeline.yml | 14 ++++++++++++-- ci/templates.lib.yml | 6 +++++- 3 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 ci/docker/task/Dockerfile diff --git a/ci/docker/task/Dockerfile b/ci/docker/task/Dockerfile new file mode 100644 index 0000000000..ae648e4960 --- /dev/null +++ b/ci/docker/task/Dockerfile @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM alpine:latest +LABEL maintainer="Apache Geode " +LABEL description="Minimal image for executing CI tasks." +RUN apk add --no-cache bash git openssh-client diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 5efa9a3223..141324ffec 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -15,6 +15,7 @@ #@ load("templates.lib.yml", #@ "gcr_image_resource", +#@ "project_gcr_image_resource", #@ "git_resource", #@ "gci_resource", #@ "docker_image_resource", @@ -39,8 +40,8 @@ resource_types: repository: smgoller/gci-resource resources: - - #@ gcr_image_resource("gcloud-image", "gcr.io/google.com/cloudsdktool/cloud-sdk", "alpine") - - #@ gcr_image_resource("task-image", "gcr.io/gemfire-dev/gemfire-develop-alpine-tools") + - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") + - #@ project_gcr_image_resource("task-image", "geode-native-task") - #@ docker_image_resource("packer-image", "hashicorp/packer") - #@ docker_image_resource("ytt-image", "gerritk/ytt") - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) @@ -58,6 +59,13 @@ jobs: - #@ packer_job(extends_build(build)) #@ end - #@ update_pipeline_job() + - name: docker-ci + plan: + - get: ci-source + trigger: true + - put: task-image + params: + build: ci/docker/task groups: - name: builds @@ -70,6 +78,7 @@ groups: jobs: #@ for/end build in data.values.builds: - #@ packer_job_name(extends_build(build)) + - docker-ci - name: meta jobs: - update-pipeline @@ -81,3 +90,4 @@ groups: - #@ 
packer_job_name(extends_build(build)) #@ end - update-pipeline + - docker-ci diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 3370d11b81..5bee6140e0 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -46,7 +46,11 @@ #@ end #@ def gcr_image_resource(name, repository, tag="latest"): -#@ return docker_image_resource(name, repository, tag, "_json_key", "((gcr-json-key))") +#@ return docker_image_resource(name, "gcr.io/" + repository, tag, "_json_key", "((gcr-json-key))", None) +#@ end + +#@ def project_gcr_image_resource(name, repository, tag="latest"): +#@ return gcr_image_resource(name, data.values.google.project + "/" + repository, tag) #@ end #@ def git_resource(name, uri, branch, paths=[], ignore_paths=[], depth=1): From d76a705a3ba4f5104f0551a8380be19fbae32dbe Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 18:19:53 -0800 Subject: [PATCH 066/155] Fixes Dockerfile path and caches. --- ci/pipeline.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 141324ffec..193512c206 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -65,7 +65,8 @@ jobs: trigger: true - put: task-image params: - build: ci/docker/task + build: ci-source/ci/docker/task + cache: true groups: - name: builds From ac8d22f112e2b60858fff53fd64e8b259b56a885 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 19:06:06 -0800 Subject: [PATCH 067/155] Disable mirror --- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 5bee6140e0..b45f5d8817 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -41,7 +41,7 @@ #@ "tag": tag, #@ "username": username, #@ "password": password, -#@ "registry_mirror": mirror +#!@ "registry_mirror": mirror #@ }, "docker") #@ end From d388dfc1cfa3d9087d196ee316f7cad001d9966c Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 19:24:41 -0800 
Subject: [PATCH 068/155] Use newer registry-image resource for images not built in this project. --- ci/pipeline.yml | 6 +++--- ci/templates.lib.yml | 23 +++++++++++++++++------ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 193512c206..9a2faa429c 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -18,7 +18,7 @@ #@ "project_gcr_image_resource", #@ "git_resource", #@ "gci_resource", -#@ "docker_image_resource", +#@ "registry_image_resource", #@ "image_family_name", #@ "build_job", #@ "build_job_name", @@ -42,8 +42,8 @@ resource_types: resources: - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") - #@ project_gcr_image_resource("task-image", "geode-native-task") - - #@ docker_image_resource("packer-image", "hashicorp/packer") - - #@ docker_image_resource("ytt-image", "gerritk/ytt") + - #@ registry_image_resource("packer-image", "hashicorp/packer") + - #@ registry_image_resource("ytt-image", "gerritk/ytt") - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index b45f5d8817..8d9f6226d5 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -35,22 +35,33 @@ #@ } #@ end -#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None, mirror="https://mirror.gcr.io"): -#@ return resource(name, "docker-image", { +#@ def registry_image_resource(name, repository, tag="latest", username=None, password=None, mirror="mirror.gcr.io"): +#@ return resource(name, "registry-image", { #@ "repository": repository, #@ "tag": tag, #@ "username": username, #@ "password": password, -#!@ "registry_mirror": mirror -#@ }, "docker") 
+#@ "registry_mirror": { +#@ "host": mirror +#@ } +#@ }, "docker") #@ end #@ def gcr_image_resource(name, repository, tag="latest"): -#@ return docker_image_resource(name, "gcr.io/" + repository, tag, "_json_key", "((gcr-json-key))", None) +#@ return registry_image_resource(name, "gcr.io/" + repository, tag, "_json_key", "((gcr-json-key))", None) +#@ end + +#@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): +#@ return resource(name, "docker-image", { +#@ "repository": repository, +#@ "tag": tag, +#@ "username": username, +#@ "password": password, +#@ }, "docker") #@ end #@ def project_gcr_image_resource(name, repository, tag="latest"): -#@ return gcr_image_resource(name, data.values.google.project + "/" + repository, tag) +#@ return docker_image_resource(name, "gcr.io/" + data.values.google.project + "/" + repository, tag, "_json_key", "((gcr-json-key))") #@ end #@ def git_resource(name, uri, branch, paths=[], ignore_paths=[], depth=1): From c535134886c10e448331460ee451e141fad2d20e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 19:53:37 -0800 Subject: [PATCH 069/155] Templatize docker builds. 
--- ci/pipeline.yml | 22 +++++++++------------- ci/templates.lib.yml | 21 ++++++++++++++++++++- 2 files changed, 29 insertions(+), 14 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 9a2faa429c..c242e59d04 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -24,7 +24,10 @@ #@ "build_job_name", #@ "packer_job", #@ "packer_job_name", -#@ "update_pipeline_job") +#@ "docker_job", +#@ "docker_job_name", +#@ "update_pipeline_job", +#@ "update_pipeline_job_name") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") @@ -59,14 +62,7 @@ jobs: - #@ packer_job(extends_build(build)) #@ end - #@ update_pipeline_job() - - name: docker-ci - plan: - - get: ci-source - trigger: true - - put: task-image - params: - build: ci-source/ci/docker/task - cache: true + - #@ docker_job("task-image", "ci-source", "ci/docker/task") groups: - name: builds @@ -79,10 +75,10 @@ groups: jobs: #@ for/end build in data.values.builds: - #@ packer_job_name(extends_build(build)) - - docker-ci + - #@ docker_job_name("task-image") - name: meta jobs: - - update-pipeline + - #@ update_pipeline_job_name() - name: all jobs: #@ for build in data.values.builds: @@ -90,5 +86,5 @@ groups: - #@ build_job_name(extends_build(build), config) - #@ packer_job_name(extends_build(build)) #@ end - - update-pipeline - - docker-ci + - #@ update_pipeline_job_name() + - #@ docker_job_name("task-image") diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 8d9f6226d5..615a1c4685 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -400,8 +400,12 @@ plan: #@ end +#@ def update_pipeline_job_name(): +#@ return "update-pipeline" +#@ end + #@ def update_pipeline_job(): -name: update-pipeline +name: #@ update_pipeline_job_name() serial: true plan: - in_parallel: @@ -440,3 +444,18 @@ plan: - set_pipeline: self file: pipeline/pipeline.yml #@ end + +#@ def docker_job_name(name): +#@ return "docker-" + name +#@ end + +#@ def docker_job(name, source, 
path): +name: #@ docker_job_name(name) +plan: + - get: #@ source + trigger: true + - put: #@ name + params: + build: #@ source + "/" + path + cache: true +#@ end From 6e0e6b87c2819430951ed8a8c1b33f74676b5598 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 20:38:07 -0800 Subject: [PATCH 070/155] Generates semver. --- ci/pipeline.yml | 2 ++ ci/templates.lib.yml | 15 +++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index c242e59d04..e48abd1c17 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -19,6 +19,7 @@ #@ "git_resource", #@ "gci_resource", #@ "registry_image_resource", +#@ "semver_resource", #@ "image_family_name", #@ "build_job", #@ "build_job_name", @@ -43,6 +44,7 @@ resource_types: repository: smgoller/gci-resource resources: + - #@ semver_resource("version") - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") - #@ project_gcr_image_resource("task-image", "geode-native-task") - #@ registry_image_resource("packer-image", "hashicorp/packer") diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 615a1c4685..35a281c56d 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -328,6 +328,7 @@ plan: - in_parallel: fail_fast: true steps: + - get: version - get: source trigger: true - do: @@ -459,3 +460,17 @@ plan: build: #@ source + "/" + path cache: true #@ end + +#@ def semver_resource(name): +#@ return resource( +#@ name, +#@ "semver", +#@ { +#@ "initial_version": "1.14.0-build.0", +#@ "driver": "gcs", +#@ "bucket": data.values.google.project + "-semver", +#@ "key": "geode-native/" + data.values.pipeline.name + "/" + name, +#@ "json_key": "((gcr-json-key))" +#@ }, +#@ "numeric-1-box-multiple") +#@ end From 8a764e9995562b8f4514981c33ef7b3d9947d623 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 30 Nov 2020 22:27:46 -0800 Subject: [PATCH 071/155] Generates semver. 
--- ci/pipeline.yml | 2 +- ci/templates.lib.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index e48abd1c17..a347c34e34 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -44,7 +44,7 @@ resource_types: repository: smgoller/gci-resource resources: - - #@ semver_resource("version") + - #@ semver_resource("version", "1.14.0-build.0") - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") - #@ project_gcr_image_resource("task-image", "geode-native-task") - #@ registry_image_resource("packer-image", "hashicorp/packer") diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 35a281c56d..b658521292 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -461,12 +461,12 @@ plan: cache: true #@ end -#@ def semver_resource(name): +#@ def semver_resource(name, initialVersion="0.1.0-build.0"): #@ return resource( #@ name, #@ "semver", #@ { -#@ "initial_version": "1.14.0-build.0", +#@ "initial_version": initialVersion, #@ "driver": "gcs", #@ "bucket": data.values.google.project + "-semver", #@ "key": "geode-native/" + data.values.pipeline.name + "/" + name, From f93b7b7ae357a18a3fe9726a713980b4b008e05a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 10:09:01 -0800 Subject: [PATCH 072/155] Fixes semver. 
--- ci/pipeline.yml | 6 +++++- ci/templates.lib.yml | 25 ++++++++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/ci/pipeline.yml b/ci/pipeline.yml index a347c34e34..4b009022dd 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -28,7 +28,9 @@ #@ "docker_job", #@ "docker_job_name", #@ "update_pipeline_job", -#@ "update_pipeline_job_name") +#@ "update_pipeline_job_name", +#@ "version_source_job", +#@ "version_source_job_name") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") @@ -63,6 +65,7 @@ jobs: - #@ build_job(extends_build(build), config) - #@ packer_job(extends_build(build)) #@ end + - #@ version_source_job() - #@ update_pipeline_job() - #@ docker_job("task-image", "ci-source", "ci/docker/task") @@ -90,3 +93,4 @@ groups: #@ end - #@ update_pipeline_job_name() - #@ docker_job_name("task-image") + - #@ version_source_job_name() diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index b658521292..3756428b90 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -329,8 +329,11 @@ plan: fail_fast: true steps: - get: version + trigger: false + passed: [ version-source ] - get: source - trigger: true + trigger: false + passed: [ version-source ] - do: - in_parallel: fail_fast: true @@ -474,3 +477,23 @@ plan: #@ }, #@ "numeric-1-box-multiple") #@ end + +#@ def version_source_job_name(): +#@ return "version-source" +#@ end + +#@ def version_source_job(): +name: #@ version_source_job_name() +plan: + - in_parallel: + fail_fast: true + steps: + - get: version + params: + pre: build + - get: source + trigger: true + - put: version + params: + file: version/number +#@ end From 45e4a7bf42ba85ae0e2351a6207f0a2f96d201e3 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 10:08:08 -0800 Subject: [PATCH 073/155] Fixes config build type. 
--- ci/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 3756428b90..adbfc6858d 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -202,7 +202,7 @@ config: remote_shell git clone ${git_url} source remote_shell cmake -E chdir source git checkout ${git_rev} remote_shell cmake -E make_directory build - remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_config=${CMAKE_CONFIG} + remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" | tee cpack.out From 10d02fac70855700a789cb7b059701d3871f7bec Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 10:18:35 -0800 Subject: [PATCH 074/155] Configure version, date, etc. 
--- ci/templates.lib.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index adbfc6858d..0019fc5eec 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -178,6 +178,7 @@ config: platform: linux inputs: - name: instance + - name: version - name: source outputs: - name: package @@ -199,10 +200,18 @@ config: git_rev=$(git rev-parse HEAD) popd + version=$(cat version/number) + builddate=$(date) + remote_shell git clone ${git_url} source remote_shell cmake -E chdir source git checkout ${git_rev} remote_shell cmake -E make_directory build - remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} + remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} \ + -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} \ + -DPRODUCT_VERSION=${version} \ + -DPRODUCT_BUILDDATE="${builddate}" \ + -DPRODUCT_SOURCE_REVISION=${git_rev} \ + -DPRODUCT_SOURCE_REPOSITORY=${git_url} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} remote_shell cmake --build build --config ${CMAKE_CONFIG} --target docs -- ${CMAKE_BUILD_FLAGS} remote_shell cmake -E chdir build cpack -C ${CMAKE_CONFIG} -G "${CPACK_GENERATORS}" | tee cpack.out From 20672ca610c37d11abcfac5b8ba0c653c3f50c3c Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 16:00:16 -0800 Subject: [PATCH 075/155] Fixes date --- ci/templates.lib.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 0019fc5eec..59b1bff3f1 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -201,7 +201,7 @@ config: popd version=$(cat version/number) - builddate=$(date) + builddate=$(date "+%Y-%m-%d") remote_shell git clone ${git_url} source remote_shell cmake -E chdir source git checkout ${git_rev} @@ -209,7 +209,7 @@ config: remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} 
\ -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} \ -DPRODUCT_VERSION=${version} \ - -DPRODUCT_BUILDDATE="${builddate}" \ + -DPRODUCT_BUILDDATE=${builddate} \ -DPRODUCT_SOURCE_REVISION=${git_rev} \ -DPRODUCT_SOURCE_REPOSITORY=${git_url} remote_shell cmake --build build --config ${CMAKE_CONFIG} -- ${CMAKE_BUILD_FLAGS} From de5d3b52fc7506bb26ae4508cca03ae8455f4b46 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 16:00:36 -0800 Subject: [PATCH 076/155] Parameterizes bucket and key. --- ci/data.yml | 3 +++ ci/set-pipeline.sh | 26 ++++++++++++++++---------- ci/templates.lib.yml | 8 ++++++-- 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index f943eae6f6..bb288b5d90 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -25,6 +25,9 @@ repository: google: project: apachegeode-ci zone: us-central1-f + storage: + bucket: apachegeode-ci-concourse + key: geode-native/develop default: build: diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index 65751c614f..7e059830d5 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -22,16 +22,18 @@ $0 Usage: Sets Concourse pipeline for Geode Native builds. Options: -Environment Var Parameter Description Default -target --target Fly target. "default" -branch --branch Branch to build. Current checked out branch. -repository --repository Remote URL for repository. Current tracking branch repository. -pipeline --pipeline Name of pipeline to set. Based on repository owner name and branch. -google_zone --google-zone Google Compute project. Current default project. -google_project --google-project Google Compute zone. Current default zone. -fly --fly Path to fly executable. "fly" -ytt --ytt Path to ytt executable. "ytt" -output --output Rendered pipeline file. Temporary file. +Parameter Description Default +--target Fly target. "default" +--branch Branch to build. Current checked out branch. +--repository Remote URL for repository. Current tracking branch repository. 
+--pipeline Name of pipeline to set. Based on repository owner name and branch. +--google-zone Google Compute project. Current default project. +--google-project Google Compute zone. Current default zone. +--google-storage-bucket Google Compute Storage bucket. Based on google-project value. +--google-storage-key Google Compute Storage key prefix. Based on pipeline value. +--fly Path to fly executable. "fly" +--ytt Path to ytt executable. "ytt" +--output Rendered pipeline file. Temporary file. EOF } @@ -73,6 +75,8 @@ pipeline=${pipeline//[^[:word:]-]/-} google_project=${google_project:-$(gcloud config get-value project)} google_zone=${google_zone:-$(gcloud config get-value compute/zone)} +google_storage_bucket=${google_storage_bucket:-${google_project}-concourse} +google_storage_key=${google_storage_key:-geode-native/${pipeline}} bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml \ --data-value pipeline.name=${pipeline} \ @@ -80,6 +84,8 @@ bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.t --data-value repository.branch=${branch} \ --data-value google.project=${google_project} \ --data-value google.zone=${google_zone} \ + --data-value google.storage.bucket=${google_storage_bucket} \ + --data-value google.storage.key=${google_storage_key} \ > ${output} bash -c "${fly} \$@" fly --target=${target} \ diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 59b1bff3f1..39bfad1507 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -452,6 +452,10 @@ plan: - #@ "google.project=" + data.values.google.project - --data-value - #@ "google.zone=" + data.values.google.zone + - --data-value + - #@ "google.storage.bucket=" + data.values.google.storage.bucket + - --data-value + - #@ "google.storage.key=" + data.values.google.storage.key - --output-files - pipeline/ - set_pipeline: self @@ -480,8 +484,8 @@ plan: #@ { #@ "initial_version": initialVersion, #@ "driver": "gcs", -#@ "bucket": 
data.values.google.project + "-semver", -#@ "key": "geode-native/" + data.values.pipeline.name + "/" + name, +#@ "bucket": data.values.google.storage.bucket, +#@ "key": data.values.google.storage.key + "/" + name, #@ "json_key": "((gcr-json-key))" #@ }, #@ "numeric-1-box-multiple") From 9888b3c70a8e232c05e43643f2fe72f30cf83e28 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 1 Dec 2020 19:07:48 -0800 Subject: [PATCH 077/155] Upload packages to GCS --- ci/data.yml | 6 +++++ ci/pipeline.yml | 27 ++++++++-------------- ci/templates.lib.yml | 54 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 70 insertions(+), 17 deletions(-) diff --git a/ci/data.yml b/ci/data.yml index bb288b5d90..8d7127eee1 100644 --- a/ci/data.yml +++ b/ci/data.yml @@ -35,6 +35,7 @@ default: image_family: #@ None source_image_family: #@ None with_dot_net: #@ False + package_suffix: #@ None params: CMAKE_CONFIGURE_FLAGS: CMAKE_BUILD_FLAGS: "-j16" @@ -49,18 +50,23 @@ builds: CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" CMAKE_BUILD_FLAGS: "/m" CPACK_GENERATORS: "ZIP" + package_suffix: Windows-64bit.zip - name: rhel-7 image_family: build-rhel-7 source_image_family: rhel-7 + package_suffix: Linux-64bit.tar.gz - name: rhel-8 image_family: build-rhel-8 source_image_family: rhel-8 + package_suffix: Linux-64bit.tar.gz - name: ubuntu-16.04 image_family: build-ubuntu-16-04 source_image_family: ubuntu-1604-lts + package_suffix: Linux-64bit.tar.gz - name: ubuntu-18.04 image_family: build-ubuntu-18-04 source_image_family: ubuntu-1804-lts + package_suffix: Linux-64bit.tar.gz configs: - name: debug diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 4b009022dd..e63d0da307 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -17,13 +17,12 @@ #@ "gcr_image_resource", #@ "project_gcr_image_resource", #@ "git_resource", -#@ "gci_resource", +#@ "extends_build", #@ "registry_image_resource", #@ "semver_resource", -#@ "image_family_name", -#@ "build_job", +#@ "build_resources", +#@ "build_jobs", #@ 
"build_job_name", -#@ "packer_job", #@ "packer_job_name", #@ "docker_job", #@ "docker_job_name", @@ -34,16 +33,17 @@ #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") - -#@ def extends_build(build): -#@ return struct.make(**overlay.apply(data.values.default.build, build)) -#@ end +#@ load("@ytt:template", "template") resource_types: - name: gci type: docker-image source: repository: smgoller/gci-resource + - name: gcs-resource + type: docker-image + source: + repository: frodenas/gcs-resource resources: - #@ semver_resource("version", "1.14.0-build.0") @@ -54,20 +54,13 @@ resources: - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) - #@ for build in data.values.builds: - - #@ gci_resource(image_family_name(extends_build(build).image_family)) - - #@ gci_resource(extends_build(build).source_image_family) - #@ end + - #@ template.replace(build_resources(data.values.builds, data.values.configs)) jobs: - #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job(extends_build(build), config) - - #@ packer_job(extends_build(build)) - #@ end - #@ version_source_job() - #@ update_pipeline_job() - #@ docker_job("task-image", "ci-source", "ci/docker/task") + - #@ template.replace(build_jobs(data.values.builds, data.values.configs)) groups: - name: builds diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 39bfad1507..27a046d068 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -26,6 +26,13 @@ #@ "run_net_integration_tests", #@ "run_net_legacy_integration_tests") +#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:struct", "struct") + +#@ def extends_build(build): +#@ return 
struct.make(**overlay.apply(data.values.default.build, build)) +#@ end + #@ def resource(name, type, source=None, icon=None): #@ return { #@ "name": name, @@ -354,6 +361,7 @@ plan: - #@ create_instance(build, config) - do: - #@ build_task(config.config, build.params) + - #@ template.replace(put_package(build, config)) - #@ cpp_unit_test_task(build, config) - #@ cpp_integration_test_task(build, config) - #@ cpp_legacy_integration_test_task(build, config) @@ -510,3 +518,49 @@ plan: params: file: version/number #@ end + + +#@ def package_resource_name(build, config): +#@ return "package-" + build.name + "-" + config.name +#@ end + +#@ def package_resource(build, config): + - name: #@ package_resource_name(build, config) + type: gcs-resource + source: + bucket: #@ data.values.google.storage.bucket + json_key: ((gcr-json-key)) + regexp: #@ data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/apache-geode-native-(?P.*)-" + build.package_suffix + - name: #@ package_resource_name(build, config) + "-hash" + type: gcs-resource + source: + bucket: #@ data.values.google.storage.bucket + json_key: ((gcr-json-key)) + regexp: #@ data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/apache-geode-native-(?P.*)-" + build.package_suffix + ".sha512" +#@ end + +#@ def put_package(build, config): + - put: #@ package_resource_name(build, config) + params: + file: #@ "package/*" + build.package_suffix + - put: #@ package_resource_name(build, config) + "-hash" + params: + file: #@ "package/*" + build.package_suffix + ".sha512" +#@ end + +#@ def build_resources(builds, configs): +#@ for build in builds: + #@ for/end config in configs: + - #@ template.replace(package_resource(extends_build(build), config)) + - #@ gci_resource(image_family_name(extends_build(build).image_family)) + - #@ gci_resource(extends_build(build).source_image_family) +#@ end +#@ end + +#@ def build_jobs(builds, configs): +#@ for build in builds: + #@ for/end 
config in configs: + - #@ build_job(extends_build(build), config) + - #@ packer_job(extends_build(build)) +#@ end +#@ end From 7da83bf8907afb0b996f60443ff242776d41bb6d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 04:47:39 -0800 Subject: [PATCH 078/155] Upload packages to GCS --- ci/templates.lib.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/ci/templates.lib.yml b/ci/templates.lib.yml index 27a046d068..bf76862cf0 100644 --- a/ci/templates.lib.yml +++ b/ci/templates.lib.yml @@ -345,10 +345,10 @@ plan: fail_fast: true steps: - get: version - trigger: false + trigger: true passed: [ version-source ] - get: source - trigger: false + trigger: true passed: [ version-source ] - do: - in_parallel: @@ -361,7 +361,6 @@ plan: - #@ create_instance(build, config) - do: - #@ build_task(config.config, build.params) - - #@ template.replace(put_package(build, config)) - #@ cpp_unit_test_task(build, config) - #@ cpp_integration_test_task(build, config) - #@ cpp_legacy_integration_test_task(build, config) @@ -370,6 +369,7 @@ plan: - #@ net_integration_test_task(build, config) - #@ net_legacy_integration_test_task(build, config) #@ end + - #@ template.replace(put_package(build, config)) on_failure: #@ download_build_task() ensure: #@ delete_instance() #@ end @@ -540,12 +540,15 @@ plan: #@ end #@ def put_package(build, config): - - put: #@ package_resource_name(build, config) - params: - file: #@ "package/*" + build.package_suffix - - put: #@ package_resource_name(build, config) + "-hash" - params: - file: #@ "package/*" + build.package_suffix + ".sha512" + - in_parallel: + fail_fast: true + steps: + - put: #@ package_resource_name(build, config) + params: + file: #@ "package/*" + build.package_suffix + - put: #@ package_resource_name(build, config) + "-hash" + params: + file: #@ "package/*" + build.package_suffix + ".sha512" #@ end #@ def build_resources(builds, configs): From a591029f7ab0f13708f169e6c9570024c37ec2ad 
Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 10:26:34 -0800 Subject: [PATCH 079/155] Split release a pr pipelines. --- ci/base/base.yml | 111 +++++++++++++++++++++++++++++++ ci/{ => base}/pipeline.yml | 34 +--------- ci/data.yml | 75 --------------------- ci/{ => lib}/templates.lib.txt | 0 ci/{ => lib}/templates.lib.yml | 101 ++++++++++++++++++---------- ci/pr/data.yml | 24 +++++++ ci/pr/pipeline.yml | 61 +++++++++++++++++ ci/release/data.yml | 27 ++++++++ ci/release/pipeline.yml | 116 +++++++++++++++++++++++++++++++++ ci/set-pipeline.sh | 40 ++++++++---- 10 files changed, 432 insertions(+), 157 deletions(-) create mode 100644 ci/base/base.yml rename ci/{ => base}/pipeline.yml (68%) delete mode 100644 ci/data.yml rename ci/{ => lib}/templates.lib.txt (100%) rename ci/{ => lib}/templates.lib.yml (90%) create mode 100644 ci/pr/data.yml create mode 100644 ci/pr/pipeline.yml create mode 100644 ci/release/data.yml create mode 100644 ci/release/pipeline.yml diff --git a/ci/base/base.yml b/ci/base/base.yml new file mode 100644 index 0000000000..3995397008 --- /dev/null +++ b/ci/base/base.yml @@ -0,0 +1,111 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. 
+ +#@ load("@ytt:overlay", "overlay") + +#@data/values +--- +pipeline: + name: develop + +repository: + url: https://github.com/apache/geode-native + branch: develop + +google: + project: apachegeode-ci + zone: us-central1-f + storage: + bucket: apachegeode-ci-concourse + key: geode-native/develop + +builds: + - name: windows + image_family: build-windows-2019-vs-2017 + source_image_family: windows-2019 + with_dot_net: #@ True + params: + CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" + CMAKE_BUILD_FLAGS: "/m" + CPACK_GENERATORS: "ZIP" + packages: + - name: archive + regexp: apache-geode-native-(?P.*)-Windows-64bit.zip + glob: "*-Windows-64bit.zip" + - name: archive.sha512 + regexp: apache-geode-native-(?P.*)-Windows-64bit.zip.sha512 + glob: "*-Windows-64bit.zip.sha512" + - name: rhel-7 + image_family: build-rhel-7 + source_image_family: rhel-7 + with_dot_net: #@ False + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" + packages: + - name: archive + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz + glob: "*-Linux-64bit.tar.gz" + - name: archive.sha512 + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 + glob: "*-Linux-64bit.tar.gz.sha512" + - name: rhel-8 + image_family: build-rhel-8 + source_image_family: rhel-8 + with_dot_net: #@ False + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" + packages: + - name: archive + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz + glob: "*-Linux-64bit.tar.gz" + - name: archive.sha512 + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 + glob: "*-Linux-64bit.tar.gz.sha512" + - name: ubuntu-16.04 + image_family: build-ubuntu-16-04 + source_image_family: ubuntu-1604-lts + with_dot_net: #@ False + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" + packages: + - name: archive + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz + glob: "*-Linux-64bit.tar.gz" + - name: archive.sha512 + 
regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 + glob: "*-Linux-64bit.tar.gz.sha512" + - name: ubuntu-18.04 + image_family: build-ubuntu-18-04 + source_image_family: ubuntu-1804-lts + with_dot_net: #@ False + params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" + packages: + - name: archive + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz + glob: "*-Linux-64bit.tar.gz" + - name: archive.sha512 + regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 + glob: "*-Linux-64bit.tar.gz.sha512" + +configs: [ ] diff --git a/ci/pipeline.yml b/ci/base/pipeline.yml similarity index 68% rename from ci/pipeline.yml rename to ci/base/pipeline.yml index e63d0da307..5f5bac16da 100644 --- a/ci/pipeline.yml +++ b/ci/base/pipeline.yml @@ -49,41 +49,11 @@ resources: - #@ semver_resource("version", "1.14.0-build.0") - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") - #@ project_gcr_image_resource("task-image", "geode-native-task") - - #@ registry_image_resource("packer-image", "hashicorp/packer") - - #@ registry_image_resource("ytt-image", "gerritk/ytt") - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) - - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) - #@ template.replace(build_resources(data.values.builds, data.values.configs)) + - #@ registry_image_resource("ytt-image", "gerritk/ytt") + - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) jobs: - - #@ version_source_job() - - #@ update_pipeline_job() - - #@ docker_job("task-image", "ci-source", "ci/docker/task") - #@ template.replace(build_jobs(data.values.builds, data.values.configs)) -groups: - - name: builds - jobs: - #@ for build in data.values.builds: - #@ 
for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) - #@ end - - name: images - jobs: - #@ for/end build in data.values.builds: - - #@ packer_job_name(extends_build(build)) - - #@ docker_job_name("task-image") - - name: meta - jobs: - - #@ update_pipeline_job_name() - - name: all - jobs: - #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) - - #@ packer_job_name(extends_build(build)) - #@ end - - #@ update_pipeline_job_name() - - #@ docker_job_name("task-image") - - #@ version_source_job_name() diff --git a/ci/data.yml b/ci/data.yml deleted file mode 100644 index 8d7127eee1..0000000000 --- a/ci/data.yml +++ /dev/null @@ -1,75 +0,0 @@ -#! Licensed to the Apache Software Foundation (ASF) under one or more -#! contributor license agreements. See the NOTICE file distributed with -#! this work for additional information regarding copyright ownership. -#! The ASF licenses this file to You under the Apache License, Version 2.0 -#! (the "License"); you may not use this file except in compliance with -#! the License. You may obtain a copy of the License at -#! -#! http://www.apache.org/licenses/LICENSE-2.0 -#! -#! Unless required by applicable law or agreed to in writing, software -#! distributed under the License is distributed on an "AS IS" BASIS, -#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -#! See the License for the specific language governing permissions and -#! limitations under the License. 
- -#@data/values ---- -pipeline: - name: develop - -repository: - url: https://github.com/apache/geode-native - branch: develop - -google: - project: apachegeode-ci - zone: us-central1-f - storage: - bucket: apachegeode-ci-concourse - key: geode-native/develop - -default: - build: - name: #@ None - image_family: #@ None - source_image_family: #@ None - with_dot_net: #@ False - package_suffix: #@ None - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - -builds: - - name: windows - image_family: build-windows-2019-vs-2017 - source_image_family: windows-2019 - with_dot_net: #@ True - params: - CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" - CMAKE_BUILD_FLAGS: "/m" - CPACK_GENERATORS: "ZIP" - package_suffix: Windows-64bit.zip - - name: rhel-7 - image_family: build-rhel-7 - source_image_family: rhel-7 - package_suffix: Linux-64bit.tar.gz - - name: rhel-8 - image_family: build-rhel-8 - source_image_family: rhel-8 - package_suffix: Linux-64bit.tar.gz - - name: ubuntu-16.04 - image_family: build-ubuntu-16-04 - source_image_family: ubuntu-1604-lts - package_suffix: Linux-64bit.tar.gz - - name: ubuntu-18.04 - image_family: build-ubuntu-18-04 - source_image_family: ubuntu-1804-lts - package_suffix: Linux-64bit.tar.gz - -configs: - - name: debug - config: Debug - - name: release - config: RelWithDebInfo diff --git a/ci/templates.lib.txt b/ci/lib/templates.lib.txt similarity index 100% rename from ci/templates.lib.txt rename to ci/lib/templates.lib.txt diff --git a/ci/templates.lib.yml b/ci/lib/templates.lib.yml similarity index 90% rename from ci/templates.lib.yml rename to ci/lib/templates.lib.yml index bf76862cf0..643dd4a128 100644 --- a/ci/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -29,10 +29,12 @@ #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") +--- #@ def extends_build(build): -#@ return struct.make(**overlay.apply(data.values.default.build, build)) +#@ return build #@ end +--- #@ def resource(name, type, 
source=None, icon=None): #@ return { #@ "name": name, @@ -42,6 +44,7 @@ #@ } #@ end +--- #@ def registry_image_resource(name, repository, tag="latest", username=None, password=None, mirror="mirror.gcr.io"): #@ return resource(name, "registry-image", { #@ "repository": repository, @@ -54,10 +57,12 @@ #@ }, "docker") #@ end +--- #@ def gcr_image_resource(name, repository, tag="latest"): #@ return registry_image_resource(name, "gcr.io/" + repository, tag, "_json_key", "((gcr-json-key))", None) #@ end +--- #@ def docker_image_resource(name, repository, tag="latest", username=None, password=None): #@ return resource(name, "docker-image", { #@ "repository": repository, @@ -67,10 +72,12 @@ #@ }, "docker") #@ end +--- #@ def project_gcr_image_resource(name, repository, tag="latest"): #@ return docker_image_resource(name, "gcr.io/" + data.values.google.project + "/" + repository, tag, "_json_key", "((gcr-json-key))") #@ end +--- #@ def git_resource(name, uri, branch, paths=[], ignore_paths=[], depth=1): #@ return resource(name, "git", { #@ "branch": branch, @@ -81,19 +88,23 @@ #@ }, "github") #@ end +--- #@ def hyphenated(value): #! 
TODO [a-z0-9-] #@ return value.lower().replace(".", "-").replace("/", "-") #@ end +--- #@ def image_family_name(family): #@ return (data.values.pipeline.name + "-" + family)[0:62] #@ end +--- #@ def gci_resource_name(family): #@ return family + "-gci" #@ end +--- #@ def gci_resource(family, project=data.values.google.project): #@ return resource(gci_resource_name(family), "gci", { #@ "key": "((gcr-json-key))", @@ -102,6 +113,7 @@ #@ }, "google-cloud") #@ end +--- #@ def create_instance(build, config): task: create image: gcloud-image @@ -176,8 +188,10 @@ config: done #@ end +--- #@ load("@ytt:template", "template") +--- #@ def build_task(config, params={}): task: build image: task-image @@ -233,6 +247,7 @@ config: done #@ end +--- #@ def remote_task(name, config, commands, timeout, params={}, attempts=1): task: #@ name timeout: #@ timeout @@ -257,34 +272,41 @@ config: (@= commands @) #@ end +--- #@ def packer_build_task(build): #@ end - +--- #@ def cpp_unit_test_task(build, config): #@ return remote_task("cpp-unit-tests", config.config, run_cpp_unit_tests(), "5m", build.params) #@ end +--- #@ def cpp_integration_test_task(build, config): #@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) #@ end +--- #@ def cpp_legacy_integration_test_task(build, config): #@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params, 5) #@ end +--- #@ def net_unit_test_task(build, config): #@ return remote_task("net-unit-tests", config.config, run_net_unit_tests(), "5m", build.params) #@ end +--- #@ def net_integration_test_task(build, config): #@ return remote_task("net-integration-tests", config.config, run_net_integration_tests(), "30m", build.params) #@ end +--- #@ def net_legacy_integration_test_task(build, config): #@ return remote_task("net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) #@ end +--- 
#@ def download_build_task(): task: download-build image: task-image @@ -307,6 +329,7 @@ config: remote_copy_recursive build . #@ end +--- #@ def delete_instance(): task: delete image: gcloud-image @@ -334,10 +357,12 @@ config: --quiet #@ end +--- #@ def build_job_name(build, config): #@ return "build-" + build.name + "-" + config.name #@ end +--- #@ def build_job(build, config): name: #@ build_job_name(build, config) plan: @@ -346,10 +371,8 @@ plan: steps: - get: version trigger: true - passed: [ version-source ] - get: source trigger: true - passed: [ version-source ] - do: - in_parallel: fail_fast: true @@ -369,20 +392,22 @@ plan: - #@ net_integration_test_task(build, config) - #@ net_legacy_integration_test_task(build, config) #@ end - - #@ template.replace(put_package(build, config)) on_failure: #@ download_build_task() ensure: #@ delete_instance() #@ end +--- #@ def gci_label_value(value): #! TODO [a-z0-9_-] #@ return hyphenated(value[0:62]) #@ end +--- #@ def packer_job_name(build): #@ return "packer-" + build.image_family #@ end +--- #@ def packer_job(build): name: #@ packer_job_name(build) plan: @@ -421,11 +446,13 @@ plan: #@ end +--- #@ def update_pipeline_job_name(): #@ return "update-pipeline" #@ end -#@ def update_pipeline_job(): +--- +#@ def update_pipeline_job(variant): name: #@ update_pipeline_job_name() serial: true plan: @@ -449,7 +476,11 @@ plan: path: /usr/bin/ytt args: - --file - - source/ci/. 
+ - source/ci/lib + - --file + - source/ci/base + - --file + - #@ "source/ci/" + variant - --data-value - #@ "pipeline.name=" + data.values.pipeline.name - --data-value @@ -470,10 +501,12 @@ plan: file: pipeline/pipeline.yml #@ end +--- #@ def docker_job_name(name): #@ return "docker-" + name #@ end +--- #@ def docker_job(name, source, path): name: #@ docker_job_name(name) plan: @@ -485,6 +518,7 @@ plan: cache: true #@ end +--- #@ def semver_resource(name, initialVersion="0.1.0-build.0"): #@ return resource( #@ name, @@ -499,10 +533,12 @@ plan: #@ "numeric-1-box-multiple") #@ end +--- #@ def version_source_job_name(): #@ return "version-source" #@ end +--- #@ def version_source_job(): name: #@ version_source_job_name() plan: @@ -519,51 +555,44 @@ plan: file: version/number #@ end - -#@ def package_resource_name(build, config): -#@ return "package-" + build.name + "-" + config.name +--- +#@ def package_resource_name(build, config, package): +#@ return "package-" + build.name + "-" + config.name + "-" + package.name #@ end -#@ def package_resource(build, config): - - name: #@ package_resource_name(build, config) - type: gcs-resource - source: - bucket: #@ data.values.google.storage.bucket - json_key: ((gcr-json-key)) - regexp: #@ data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/apache-geode-native-(?P.*)-" + build.package_suffix - - name: #@ package_resource_name(build, config) + "-hash" - type: gcs-resource - source: - bucket: #@ data.values.google.storage.bucket - json_key: ((gcr-json-key)) - regexp: #@ data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/apache-geode-native-(?P.*)-" + build.package_suffix + ".sha512" +--- +#@ def package_resource(build, config, package): +name: #@ package_resource_name(build, config, package) +type: gcs-resource +source: + bucket: #@ data.values.google.storage.bucket + json_key: ((gcr-json-key)) + regexp: #@ data.values.google.storage.key + "/packages/" + build.name + 
"/" + config.name + "/" + package.regexp #@ end +--- #@ def put_package(build, config): - - in_parallel: - fail_fast: true - steps: - - put: #@ package_resource_name(build, config) - params: - file: #@ "package/*" + build.package_suffix - - put: #@ package_resource_name(build, config) + "-hash" - params: - file: #@ "package/*" + build.package_suffix + ".sha512" +in_parallel: + fail_fast: true + steps: + #@ for package in build.packages: + - put: #@ package_resource_name(build, config, package) + params: + file: #@ "package/" + package.glob + #@ end #@ end +--- #@ def build_resources(builds, configs): #@ for build in builds: - #@ for/end config in configs: - - #@ template.replace(package_resource(extends_build(build), config)) - #@ gci_resource(image_family_name(extends_build(build).image_family)) - - #@ gci_resource(extends_build(build).source_image_family) #@ end #@ end +--- #@ def build_jobs(builds, configs): #@ for build in builds: #@ for/end config in configs: - #@ build_job(extends_build(build), config) - - #@ packer_job(extends_build(build)) #@ end #@ end diff --git a/ci/pr/data.yml b/ci/pr/data.yml new file mode 100644 index 0000000000..8f14f71912 --- /dev/null +++ b/ci/pr/data.yml @@ -0,0 +1,24 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! 
limitations under the License. + +#@ load("@ytt:overlay", "overlay") + +#@data/values +--- +#@overlay/match missing_ok=True +configs: + #@overlay/append + - name: debug + config: Debug diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml new file mode 100644 index 0000000000..f396c15eb0 --- /dev/null +++ b/ci/pr/pipeline.yml @@ -0,0 +1,61 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. 
+ +#@ load("templates.lib.yml", +#@ "git_resource", +#@ "gci_resource", +#@ "extends_build", +#@ "image_family_name", +#@ "registry_image_resource", +#@ "build_job_name", +#@ "packer_job", +#@ "packer_job_name", +#@ "docker_job", +#@ "docker_job_name", +#@ "update_pipeline_job", +#@ "update_pipeline_job_name", +#@ "version_source_job", +#@ "version_source_job_name") +#@ load("@ytt:data", "data") +#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:struct", "struct") +#@ load("@ytt:template", "template") + +#@overlay/match by=overlay.all +--- + +jobs: + #@overlay/match by="name", missing_ok=True + #@overlay/append + - #@ update_pipeline_job("pr") + + #@overlay/match missing_ok=True +groups: + - name: builds + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end + - name: meta + jobs: + - #@ update_pipeline_job_name() + - name: all + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end + - #@ update_pipeline_job_name() diff --git a/ci/release/data.yml b/ci/release/data.yml new file mode 100644 index 0000000000..129c89b2e4 --- /dev/null +++ b/ci/release/data.yml @@ -0,0 +1,27 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! 
See the License for the specific language governing permissions and +#! limitations under the License. + +#@ load("@ytt:overlay", "overlay") + +#@data/values +--- +#@overlay/match missing_ok=True +configs: + #@overlay/append + - name: debug + config: Debug + #@overlay/append + - name: release + config: RelWithDebInfo diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml new file mode 100644 index 0000000000..2313933d83 --- /dev/null +++ b/ci/release/pipeline.yml @@ -0,0 +1,116 @@ +#! Licensed to the Apache Software Foundation (ASF) under one or more +#! contributor license agreements. See the NOTICE file distributed with +#! this work for additional information regarding copyright ownership. +#! The ASF licenses this file to You under the Apache License, Version 2.0 +#! (the "License"); you may not use this file except in compliance with +#! the License. You may obtain a copy of the License at +#! +#! http://www.apache.org/licenses/LICENSE-2.0 +#! +#! Unless required by applicable law or agreed to in writing, software +#! distributed under the License is distributed on an "AS IS" BASIS, +#! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#! See the License for the specific language governing permissions and +#! limitations under the License. 
+ +#@ load("templates.lib.yml", +#@ "git_resource", +#@ "gci_resource", +#@ "extends_build", +#@ "image_family_name", +#@ "registry_image_resource", +#@ "build_job_name", +#@ "packer_job", +#@ "packer_job_name", +#@ "docker_job", +#@ "docker_job_name", +#@ "update_pipeline_job", +#@ "update_pipeline_job_name", +#@ "build_resources", +#@ "package_resource", +#@ "package_resource_name", +#@ "put_package", +#@ "version_source_job", +#@ "version_source_job_name") +#@ load("@ytt:data", "data") +#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:struct", "struct") +#@ load("@ytt:template", "template") + +#@overlay/match by=overlay.all +--- + +resources: + #@overlay/append + - #@ registry_image_resource("packer-image", "hashicorp/packer") + #@overlay/append + - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) + #@ for build in data.values.builds: + #@overlay/append + - #@ gci_resource(extends_build(build).source_image_family) + #@ for config in data.values.configs: + #@ for package in build.packages: + #@overlay/append + - #@ package_resource(extends_build(build), config , package) + #@ end + #@ end + #@ end + +jobs: + #@overlay/match by="name", missing_ok=True + #@overlay/append + - #@ version_source_job() + #@overlay/append + - #@ update_pipeline_job("release") + #@overlay/append + - #@ docker_job("task-image", "ci-source", "ci/docker/task") + #@ for build in data.values.builds: + #@overlay/append + - #@ packer_job(extends_build(build)) + #@ for config in data.values.configs: + #@overlay/match by="name", missing_ok=True + - name: #@ build_job_name(extends_build(build), config) + plan: + #@overlay/match by=overlay.index(0) + - in_parallel: + steps: + #@overlay/match by=overlay.subset({"get": "version"}) + - get: version + #@overlay/match missing_ok=True + passed: [ version-source ] + #@overlay/match by=overlay.subset({"get": "source"}) + - get: source + #@overlay/match missing_ok=True + passed: [ version-source ] + 
#@overlay/match by=overlay.all + #@overlay/append + - #@ put_package(build, config) + #@ end + #@ end + + #@overlay/match missing_ok=True +groups: + - name: builds + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end + - name: images + jobs: + #@ for/end build in data.values.builds: + - #@ packer_job_name(extends_build(build)) + - #@ docker_job_name("task-image") + - name: meta + jobs: + - #@ update_pipeline_job_name() + - name: all + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + - #@ packer_job_name(extends_build(build)) + #@ end + - #@ update_pipeline_job_name() + - #@ docker_job_name("task-image") + - #@ version_source_job_name() diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index 7e059830d5..b365b5f430 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -33,7 +33,7 @@ Parameter Description Default --google-storage-key Google Compute Storage key prefix. Based on pipeline value. --fly Path to fly executable. "fly" --ytt Path to ytt executable. "ytt" ---output Rendered pipeline file. Temporary file. +--output Rendered pipeline files directory. Temporary directory. 
EOF } @@ -57,7 +57,7 @@ ytt=${ytt:-ytt} fly=${fly:-fly} target=${target:-default} -output=${output:-$(mktemp)} +output=${output:-$(mktemp -d)} branch=${branch:-$(git rev-parse --abbrev-ref HEAD)} git_tracking_branch=${git_tracking_branch:-$(git for-each-ref --format='%(upstream:short)' $(git symbolic-ref -q HEAD))} @@ -78,15 +78,27 @@ google_zone=${google_zone:-$(gcloud config get-value compute/zone)} google_storage_bucket=${google_storage_bucket:-${google_project}-concourse} google_storage_key=${google_storage_key:-geode-native/${pipeline}} -bash -c "${ytt} \$@" ytt -f pipeline.yml -f templates.lib.yml -f templates.lib.txt -f data.yml \ - --data-value pipeline.name=${pipeline} \ - --data-value repository.url=${repository} \ - --data-value repository.branch=${branch} \ - --data-value google.project=${google_project} \ - --data-value google.zone=${google_zone} \ - --data-value google.storage.bucket=${google_storage_bucket} \ - --data-value google.storage.key=${google_storage_key} \ - > ${output} - -bash -c "${fly} \$@" fly --target=${target} \ - set-pipeline --pipeline=${pipeline} --config=${output} +variants=${variants:-"release pr"} +variants_release=${variant_release:-""} + +for variant in ${variants}; do + eval pipeline_suffix=\${variants_${variant}-"-${variant}"} + + bash -c "${ytt} \$@" ytt \ + --file lib \ + --file base \ + --file ${variant} \ + --data-value pipeline.name=${pipeline} \ + --data-value repository.url=${repository} \ + --data-value repository.branch=${branch} \ + --data-value google.project=${google_project} \ + --data-value google.zone=${google_zone} \ + --data-value google.storage.bucket=${google_storage_bucket} \ + --data-value google.storage.key=${google_storage_key} \ + > ${output}/${variant}.yml + + + bash -c "${fly} \$@" fly --target=${target} \ + set-pipeline --pipeline="${pipeline}${pipeline_suffix}" --config=${output}/${variant}.yml + +done \ No newline at end of file From 76e6f3ad01e5b5203f959ada213a7d71825b9338 Mon Sep 17 00:00:00 
2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 13:22:31 -0800 Subject: [PATCH 080/155] Reduce redundancy with templates. --- ci/base/base.yml | 90 ++++++++++++++++-------------------------------- 1 file changed, 29 insertions(+), 61 deletions(-) diff --git a/ci/base/base.yml b/ci/base/base.yml index 3995397008..6f6dfacef3 100644 --- a/ci/base/base.yml +++ b/ci/base/base.yml @@ -13,7 +13,24 @@ #! See the License for the specific language governing permissions and #! limitations under the License. -#@ load("@ytt:overlay", "overlay") +#@ load("@ytt:template", "template") + +--- +#@ def new_build(name, package="Linux-64bit.tar.gz"): +name: #@ name +with_dot_net: #@ False +params: + CMAKE_CONFIGURE_FLAGS: + CMAKE_BUILD_FLAGS: "-j16" + CPACK_GENERATORS: "TGZ" +packages: + - name: archive + regexp: #@ "apache-geode-native-(?P.*)-" + package + glob: #@ "*-" + package + - name: archive.sha512 + regexp: #@ "apache-geode-native-(?P.*)-" + package + ".sha512" + glob: #@ "*-" + package + ".sha512" +#@ end #@data/values --- @@ -32,80 +49,31 @@ google: key: geode-native/develop builds: - - name: windows + - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) image_family: build-windows-2019-vs-2017 source_image_family: windows-2019 + #@yaml/map-key-override with_dot_net: #@ True + #@yaml/map-key-override params: CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" CMAKE_BUILD_FLAGS: "/m" CPACK_GENERATORS: "ZIP" - packages: - - name: archive - regexp: apache-geode-native-(?P.*)-Windows-64bit.zip - glob: "*-Windows-64bit.zip" - - name: archive.sha512 - regexp: apache-geode-native-(?P.*)-Windows-64bit.zip.sha512 - glob: "*-Windows-64bit.zip.sha512" - - name: rhel-7 + + - _: #@ template.replace(new_build("rhel-7")) image_family: build-rhel-7 source_image_family: rhel-7 - with_dot_net: #@ False - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - packages: - - name: archive - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz - glob: 
"*-Linux-64bit.tar.gz" - - name: archive.sha512 - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 - glob: "*-Linux-64bit.tar.gz.sha512" - - name: rhel-8 + + - _: #@ template.replace(new_build("rhel-8")) image_family: build-rhel-8 source_image_family: rhel-8 - with_dot_net: #@ False - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - packages: - - name: archive - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz - glob: "*-Linux-64bit.tar.gz" - - name: archive.sha512 - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 - glob: "*-Linux-64bit.tar.gz.sha512" - - name: ubuntu-16.04 + + - _: #@ template.replace(new_build("ubuntu-16.04")) image_family: build-ubuntu-16-04 source_image_family: ubuntu-1604-lts - with_dot_net: #@ False - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - packages: - - name: archive - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz - glob: "*-Linux-64bit.tar.gz" - - name: archive.sha512 - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 - glob: "*-Linux-64bit.tar.gz.sha512" - - name: ubuntu-18.04 + + - _: #@ template.replace(new_build("ubuntu-18.04")) image_family: build-ubuntu-18-04 source_image_family: ubuntu-1804-lts - with_dot_net: #@ False - params: - CMAKE_CONFIGURE_FLAGS: - CMAKE_BUILD_FLAGS: "-j16" - CPACK_GENERATORS: "TGZ" - packages: - - name: archive - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz - glob: "*-Linux-64bit.tar.gz" - - name: archive.sha512 - regexp: apache-geode-native-(?P.*)-Linux-64bit.tar.gz.sha512 - glob: "*-Linux-64bit.tar.gz.sha512" configs: [ ] From b870b8a48acfdce9ac4227cc74d9fbe80e3c0c90 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 15:53:55 -0800 Subject: [PATCH 081/155] Cleanup --- ci/pr/pipeline.yml | 1 - ci/release/pipeline.yml | 1 - 2 files changed, 2 deletions(-) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index f396c15eb0..c419c06767 100644 --- 
a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -37,7 +37,6 @@ --- jobs: - #@overlay/match by="name", missing_ok=True #@overlay/append - #@ update_pipeline_job("pr") diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index 2313933d83..154d0c62fc 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -57,7 +57,6 @@ resources: #@ end jobs: - #@overlay/match by="name", missing_ok=True #@overlay/append - #@ version_source_job() #@overlay/append From 157b18e18da48768733804852367071c083fffe8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 15:54:29 -0800 Subject: [PATCH 082/155] PR resource --- ci/README.md | 3 ++- ci/pr/pipeline.yml | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/ci/README.md b/ci/README.md index d3197a4b96..d138494879 100644 --- a/ci/README.md +++ b/ci/README.md @@ -52,4 +52,5 @@ This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `gi #TODO ## Concourse Installation * helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 -* kubectl create secret generic gcr-json-key --from-literal "value=$(cat ~/Downloads/gemfire-dev-6e8864f0768c.json)" --namespace=concourse-main +* kubectl create secret generic gcr-json-key --from-literal "value=$(cat XXX.json)" --namespace=concourse-main +* kubectl create secret generic github-access-token --from-literal "value=XXX" --namespace=concourse-main diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index c419c06767..60b3b0d689 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -35,6 +35,25 @@ #@overlay/match by=overlay.all --- +resource_types: + #@overlay/append + - name: pull-request + type: docker-image + source: + repository: teliaoss/github-pr-resource + +resources: + #@overlay/match by="name" + #@overlay/replace + - name: source + type: pull-request + check_every: 1m + icon: github + source: + #@ parts = 
data.values.repository.url.rstrip(".git").rsplit("/", 2) + repository: #@ parts[1] + "/" + parts[2] + access_token: ((github-access-token)) + base_branch: #@ data.values.repository.branch jobs: #@overlay/append From 63f4e55223f5ac08b86b5321a9b12188c55c3c42 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 16:31:55 -0800 Subject: [PATCH 083/155] Cleanup --- ci/release/pipeline.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index 154d0c62fc..6d472075f4 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -81,7 +81,6 @@ jobs: - get: source #@overlay/match missing_ok=True passed: [ version-source ] - #@overlay/match by=overlay.all #@overlay/append - #@ put_package(build, config) #@ end From 920f1243c7205e27275bf279db828d97b9d15921 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 18:55:04 -0800 Subject: [PATCH 084/155] Copy source from worker to builder. --- ci/lib/templates.lib.txt | 8 ++++---- ci/lib/templates.lib.yml | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index 1f23d3e806..b6df570d4c 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -55,12 +55,12 @@ function remote_shell { ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} "$@" } -function remote_copy { - scp ${SSH_OPTIONS} -i ${ssh_key_file} "${INSTANCE_USER}@${external_ip}:${1}" "$2" +function remote_download { + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r -p "${INSTANCE_USER}@${external_ip}:${1}" "$2" } -function remote_copy_recursive { - scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${INSTANCE_USER}@${external_ip}:${1}" "$2" +function remote_upload { + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r -p "$1" "${INSTANCE_USER}@${external_ip}:${2}" } (@- end @) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 643dd4a128..4c39021921 100644 --- 
a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -224,7 +224,7 @@ config: version=$(cat version/number) builddate=$(date "+%Y-%m-%d") - remote_shell git clone ${git_url} source + remote_upload source . remote_shell cmake -E chdir source git checkout ${git_rev} remote_shell cmake -E make_directory build remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} \ @@ -239,11 +239,11 @@ config: packages=$(awk '/^CPack: - package: / {print $4}' cpack.out) for package in ${packages}; do - remote_copy ${package} package/ + remote_download ${package} package/ done checksums=$(awk '/^CPack: - checksum file: / {print $5}' cpack.out) for checksum in ${checksums}; do - remote_copy ${checksum} package/ + remote_download ${checksum} package/ done #@ end @@ -326,7 +326,7 @@ config: set -ueo pipefail (@= remote_functions() @) - remote_copy_recursive build . + remote_download build . #@ end --- From 2da6b40aa6f727be11e9dc8f849d2d83464d0bf3 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 21:34:13 -0800 Subject: [PATCH 085/155] PR status --- ci/lib/templates.lib.yml | 12 +++++++--- ci/pr/pipeline.yml | 48 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 4c39021921..60af09186f 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -373,12 +373,12 @@ plan: trigger: true - get: source trigger: true + - get: task-image - do: - in_parallel: fail_fast: true steps: - get: gcloud-image - - get: task-image - get: #@ gci_resource_name(image_family_name(build.image_family)) trigger: true - #@ create_instance(build, config) @@ -392,8 +392,14 @@ plan: - #@ net_integration_test_task(build, config) - #@ net_legacy_integration_test_task(build, config) #@ end -on_failure: #@ download_build_task() -ensure: #@ delete_instance() +on_failure: + in_parallel: + steps: + - #@ download_build_task() +ensure: + in_parallel: + steps: + 
- #@ delete_instance() #@ end --- diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 60b3b0d689..ad6257de77 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -33,6 +33,20 @@ #@ load("@ytt:struct", "struct") #@ load("@ytt:template", "template") +--- +#@ def pr_status(status, context): +try: + put: #@ "pr-status-" + status + resource: source + params: + path: source + status: #@ status + context: #@ context + get_params: + skip_download: true + attempts: 5 +#@ end + #@overlay/match by=overlay.all --- resource_types: @@ -55,11 +69,43 @@ resources: access_token: ((github-access-token)) base_branch: #@ data.values.repository.branch +#@overlay/match-child-defaults missing_ok=True jobs: #@overlay/append - #@ update_pipeline_job("pr") + #@ for build in data.values.builds: + #@ for config in data.values.configs: + #@ name = build_job_name(extends_build(build), config) + #@overlay/match by="name", missing_ok=True + - name: #@ name + plan: + #@overlay/match by=overlay.index(0) + - in_parallel: + fail_fast: true + steps: + #@overlay/match by=overlay.subset({"get": "source"}) + - get: source + on_success: #@ pr_status("pending", name) + #@ end + #@ end + on_error: + in_parallel: + steps: + #@overlay/append + - #@ pr_status("error", name) + on_failure: + in_parallel: + steps: + #@overlay/append + - #@ pr_status("failure", name) + on_success: + in_parallel: + steps: + #@overlay/append + - #@ pr_status("success", name) + - #@overlay/match missing_ok=True + #@overlay/match missing_ok=True groups: - name: builds jobs: From eeee95772b443f152b9fffbd8fe89a2dc68196cd Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 21:40:32 -0800 Subject: [PATCH 086/155] Get every PR --- ci/pr/pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index ad6257de77..c56dff1943 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -85,6 +85,7 @@ jobs: steps: #@overlay/match by=overlay.subset({"get": "source"}) - get: 
source + version: every on_success: #@ pr_status("pending", name) #@ end #@ end From 179e030e82591e3f4787778316e55dda0eea7ebe Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 21:53:40 -0800 Subject: [PATCH 087/155] Fixes PR --- ci/pr/pipeline.yml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index c56dff1943..7e8444c278 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -34,7 +34,7 @@ #@ load("@ytt:template", "template") --- -#@ def pr_status(status, context): +#@ def pr_status(context, status, description = None): try: put: #@ "pr-status-" + status resource: source @@ -42,6 +42,7 @@ try: path: source status: #@ status context: #@ context + description: #@ description get_params: skip_download: true attempts: 5 @@ -86,25 +87,25 @@ jobs: #@overlay/match by=overlay.subset({"get": "source"}) - get: source version: every - on_success: #@ pr_status("pending", name) - #@ end - #@ end + on_success: #@ pr_status(name, "pending") on_error: in_parallel: steps: #@overlay/append - - #@ pr_status("error", name) + - #@ pr_status(name, "error") on_failure: in_parallel: steps: #@overlay/append - - #@ pr_status("failure", name) + - #@ pr_status(name, "failure") on_success: in_parallel: steps: #@overlay/append - - #@ pr_status("success", name) + - #@ pr_status(name, "success") + #@ end + #@ end #@overlay/match missing_ok=True groups: From 6676bdb9d5d95dd14c30eac14c997bd5fd3783aa Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 22:18:01 -0800 Subject: [PATCH 088/155] scp -p craps out windows. 
--- ci/lib/templates.lib.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index b6df570d4c..5b2002b140 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -56,11 +56,11 @@ function remote_shell { } function remote_download { - scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r -p "${INSTANCE_USER}@${external_ip}:${1}" "$2" + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${INSTANCE_USER}@${external_ip}:${1}" "$2" } function remote_upload { - scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r -p "$1" "${INSTANCE_USER}@${external_ip}:${2}" + scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "$1" "${INSTANCE_USER}@${external_ip}:${2}" } (@- end @) From ce74a5a9d470e59d82913730b7f9ab4cbfe21768 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 22:27:26 -0800 Subject: [PATCH 089/155] Add error for canceled jobs. --- ci/pr/pipeline.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 7e8444c278..c6a626578a 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -93,6 +93,11 @@ jobs: steps: #@overlay/append - #@ pr_status(name, "error") + on_abort: + in_parallel: + steps: + #@overlay/append + - #@ pr_status(name, "error", "Build canceled") on_failure: in_parallel: steps: From 6c911990c368799dc99ebcf69f3cdce26248db40 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 22:56:48 -0800 Subject: [PATCH 090/155] Refactor common jobs. 
--- ci/base/pipeline.yml | 19 +++++++++++++++++++ ci/lib/templates.lib.yml | 6 ++++-- ci/pr/data.yml | 5 ++++- ci/pr/pipeline.yml | 29 ++++------------------------- ci/release/data.yml | 5 ++++- ci/release/pipeline.yml | 24 +++++++----------------- ci/set-pipeline.sh | 1 + 7 files changed, 43 insertions(+), 46 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 5f5bac16da..0781ef37e5 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -35,6 +35,7 @@ #@ load("@ytt:struct", "struct") #@ load("@ytt:template", "template") +--- resource_types: - name: gci type: docker-image @@ -56,4 +57,22 @@ resources: jobs: - #@ template.replace(build_jobs(data.values.builds, data.values.configs)) + - #@ update_pipeline_job() +groups: + - name: builds + jobs: + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end + - name: meta + jobs: + - #@ update_pipeline_job_name() + - name: all + jobs: + - #@ update_pipeline_job_name() + #@ for build in data.values.builds: + #@ for/end config in data.values.configs: + - #@ build_job_name(extends_build(build), config) + #@ end diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 60af09186f..c690d9f4a5 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -458,7 +458,7 @@ plan: #@ end --- -#@ def update_pipeline_job(variant): +#@ def update_pipeline_job(): name: #@ update_pipeline_job_name() serial: true plan: @@ -486,10 +486,12 @@ plan: - --file - source/ci/base - --file - - #@ "source/ci/" + variant + - #@ "source/ci/" + data.values.pipeline.variant - --data-value - #@ "pipeline.name=" + data.values.pipeline.name - --data-value + - #@ "pipeline.variant=" + data.values.pipeline.variant + - --data-value - #@ "repository.url=" + data.values.repository.url - --data-value - #@ "repository.branch=" + data.values.repository.branch diff --git a/ci/pr/data.yml b/ci/pr/data.yml index 
8f14f71912..bfff207b33 100644 --- a/ci/pr/data.yml +++ b/ci/pr/data.yml @@ -16,8 +16,11 @@ #@ load("@ytt:overlay", "overlay") #@data/values +#@overlay/match-child-defaults missing_ok=True --- -#@overlay/match missing_ok=True +pipeline: + variant: pr + configs: #@overlay/append - name: debug diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index c6a626578a..31ec3b2d85 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -72,11 +72,9 @@ resources: #@overlay/match-child-defaults missing_ok=True jobs: - #@overlay/append - - #@ update_pipeline_job("pr") - #@ for build in data.values.builds: - #@ for config in data.values.configs: - #@ name = build_job_name(extends_build(build), config) + #@ for build in data.values.builds: + #@ for config in data.values.configs: + #@ name = build_job_name(extends_build(build), config) #@overlay/match by="name", missing_ok=True - name: #@ name plan: @@ -110,23 +108,4 @@ jobs: - #@ pr_status(name, "success") #@ end - #@ end - - #@overlay/match missing_ok=True -groups: - - name: builds - jobs: - #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) - #@ end - - name: meta - jobs: - - #@ update_pipeline_job_name() - - name: all - jobs: - #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) - #@ end - - #@ update_pipeline_job_name() + #@ end diff --git a/ci/release/data.yml b/ci/release/data.yml index 129c89b2e4..4ddd6dec2d 100644 --- a/ci/release/data.yml +++ b/ci/release/data.yml @@ -16,8 +16,11 @@ #@ load("@ytt:overlay", "overlay") #@data/values +#@overlay/match-child-defaults missing_ok=True --- -#@overlay/match missing_ok=True +pipeline: + variant: release + configs: #@overlay/append - name: debug diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index 6d472075f4..0bdb44cfd0 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -39,7 +39,6 @@ 
#@overlay/match by=overlay.all --- - resources: #@overlay/append - #@ registry_image_resource("packer-image", "hashicorp/packer") @@ -60,8 +59,6 @@ jobs: #@overlay/append - #@ version_source_job() #@overlay/append - - #@ update_pipeline_job("release") - #@overlay/append - #@ docker_job("task-image", "ci-source", "ci/docker/task") #@ for build in data.values.builds: #@overlay/append @@ -86,29 +83,22 @@ jobs: #@ end #@ end - #@overlay/match missing_ok=True + #@overlay/match-child-defaults missing_ok=True groups: - - name: builds - jobs: - #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) - #@ end + #@overlay/match by="name" - name: images jobs: #@ for/end build in data.values.builds: - #@ packer_job_name(extends_build(build)) - #@ docker_job_name("task-image") - - name: meta - jobs: - - #@ update_pipeline_job_name() + #@overlay/match by="name" - name: all jobs: #@ for build in data.values.builds: - #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) + #@overlay/append - #@ packer_job_name(extends_build(build)) - #@ end - - #@ update_pipeline_job_name() + #@ end + #@overlay/append - #@ docker_job_name("task-image") + #@overlay/append - #@ version_source_job_name() diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index b365b5f430..d901925e01 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -89,6 +89,7 @@ for variant in ${variants}; do --file base \ --file ${variant} \ --data-value pipeline.name=${pipeline} \ + --data-value pipeline.variant=${variant} \ --data-value repository.url=${repository} \ --data-value repository.branch=${branch} \ --data-value google.project=${google_project} \ From 0d4c891150ae26aa9a6ca7076c3efa45d08c33cb Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 23:09:39 -0800 Subject: [PATCH 091/155] Resource type function. 
--- ci/base/pipeline.yml | 11 +++-------- ci/lib/templates.lib.yml | 10 +++++++++- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 0781ef37e5..4b96131c31 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -14,6 +14,7 @@ #! limitations under the License. #@ load("templates.lib.yml", +#@ "resource_type", #@ "gcr_image_resource", #@ "project_gcr_image_resource", #@ "git_resource", @@ -37,14 +38,8 @@ --- resource_types: - - name: gci - type: docker-image - source: - repository: smgoller/gci-resource - - name: gcs-resource - type: docker-image - source: - repository: frodenas/gcs-resource + - #@ resource_type("gci-resource", "smgoller/gci-resource") + - #@ resource_type("gcs-resource", "frodenas/gcs-resource") resources: - #@ semver_resource("version", "1.14.0-build.0") diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index c690d9f4a5..736181eb03 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -34,6 +34,14 @@ #@ return build #@ end +--- +#@ def resource_type(name, repository): +name: #@ name +type: docker-image +source: + repository: #@ repository +#@ end + --- #@ def resource(name, type, source=None, icon=None): #@ return { @@ -106,7 +114,7 @@ --- #@ def gci_resource(family, project=data.values.google.project): -#@ return resource(gci_resource_name(family), "gci", { +#@ return resource(gci_resource_name(family), "gci-resource", { #@ "key": "((gcr-json-key))", #@ "family_project": project, #@ "family": family, From 46783f1781b7ac60e4b5066e0d50b66d6c53b157 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 23:11:24 -0800 Subject: [PATCH 092/155] Remove extends_build --- ci/base/pipeline.yml | 5 ++--- ci/lib/templates.lib.yml | 9 ++------- ci/pr/pipeline.yml | 3 +-- ci/release/pipeline.yml | 13 ++++++------- 4 files changed, 11 insertions(+), 19 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 
4b96131c31..22b5a0d615 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -18,7 +18,6 @@ #@ "gcr_image_resource", #@ "project_gcr_image_resource", #@ "git_resource", -#@ "extends_build", #@ "registry_image_resource", #@ "semver_resource", #@ "build_resources", @@ -59,7 +58,7 @@ groups: jobs: #@ for build in data.values.builds: #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) + - #@ build_job_name(build, config) #@ end - name: meta jobs: @@ -69,5 +68,5 @@ groups: - #@ update_pipeline_job_name() #@ for build in data.values.builds: #@ for/end config in data.values.configs: - - #@ build_job_name(extends_build(build), config) + - #@ build_job_name(build, config) #@ end diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 736181eb03..27b8d3a74b 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -29,11 +29,6 @@ #@ load("@ytt:overlay", "overlay") #@ load("@ytt:struct", "struct") ---- -#@ def extends_build(build): -#@ return build -#@ end - --- #@ def resource_type(name, repository): name: #@ name @@ -601,7 +596,7 @@ in_parallel: --- #@ def build_resources(builds, configs): #@ for build in builds: - - #@ gci_resource(image_family_name(extends_build(build).image_family)) + - #@ gci_resource(image_family_name(build.image_family)) #@ end #@ end @@ -609,6 +604,6 @@ in_parallel: #@ def build_jobs(builds, configs): #@ for build in builds: #@ for/end config in configs: - - #@ build_job(extends_build(build), config) + - #@ build_job(build, config) #@ end #@ end diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 31ec3b2d85..66e6b600bf 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -16,7 +16,6 @@ #@ load("templates.lib.yml", #@ "git_resource", #@ "gci_resource", -#@ "extends_build", #@ "image_family_name", #@ "registry_image_resource", #@ "build_job_name", @@ -74,7 +73,7 @@ resources: jobs: #@ for build in data.values.builds: #@ for config in data.values.configs: - #@ 
name = build_job_name(extends_build(build), config) + #@ name = build_job_name(build, config) #@overlay/match by="name", missing_ok=True - name: #@ name plan: diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index 0bdb44cfd0..033b428fbf 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -16,7 +16,6 @@ #@ load("templates.lib.yml", #@ "git_resource", #@ "gci_resource", -#@ "extends_build", #@ "image_family_name", #@ "registry_image_resource", #@ "build_job_name", @@ -46,11 +45,11 @@ resources: - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) #@ for build in data.values.builds: #@overlay/append - - #@ gci_resource(extends_build(build).source_image_family) + - #@ gci_resource(build.source_image_family) #@ for config in data.values.configs: #@ for package in build.packages: #@overlay/append - - #@ package_resource(extends_build(build), config , package) + - #@ package_resource(build, config , package) #@ end #@ end #@ end @@ -62,10 +61,10 @@ jobs: - #@ docker_job("task-image", "ci-source", "ci/docker/task") #@ for build in data.values.builds: #@overlay/append - - #@ packer_job(extends_build(build)) + - #@ packer_job(build) #@ for config in data.values.configs: #@overlay/match by="name", missing_ok=True - - name: #@ build_job_name(extends_build(build), config) + - name: #@ build_job_name(build, config) plan: #@overlay/match by=overlay.index(0) - in_parallel: @@ -89,14 +88,14 @@ groups: - name: images jobs: #@ for/end build in data.values.builds: - - #@ packer_job_name(extends_build(build)) + - #@ packer_job_name(build) - #@ docker_job_name("task-image") #@overlay/match by="name" - name: all jobs: #@ for build in data.values.builds: #@overlay/append - - #@ packer_job_name(extends_build(build)) + - #@ packer_job_name(build) #@ end #@overlay/append - #@ docker_job_name("task-image") From f7bd0404377fe9d125bb1120ebf8d17ce662ec4b Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 
2 Dec 2020 23:23:18 -0800 Subject: [PATCH 093/155] Cleanup --- ci/base/pipeline.yml | 9 +-------- ci/pr/pipeline.yml | 22 +++------------------- ci/release/pipeline.yml | 7 ------- 3 files changed, 4 insertions(+), 34 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 22b5a0d615..52acb8dad0 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -23,16 +23,9 @@ #@ "build_resources", #@ "build_jobs", #@ "build_job_name", -#@ "packer_job_name", -#@ "docker_job", -#@ "docker_job_name", #@ "update_pipeline_job", -#@ "update_pipeline_job_name", -#@ "version_source_job", -#@ "version_source_job_name") +#@ "update_pipeline_job_name") #@ load("@ytt:data", "data") -#@ load("@ytt:overlay", "overlay") -#@ load("@ytt:struct", "struct") #@ load("@ytt:template", "template") --- diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 66e6b600bf..41d20a7e23 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -14,23 +14,10 @@ #! limitations under the License. #@ load("templates.lib.yml", -#@ "git_resource", -#@ "gci_resource", -#@ "image_family_name", -#@ "registry_image_resource", #@ "build_job_name", -#@ "packer_job", -#@ "packer_job_name", -#@ "docker_job", -#@ "docker_job_name", -#@ "update_pipeline_job", -#@ "update_pipeline_job_name", -#@ "version_source_job", -#@ "version_source_job_name") +#@ "resource_type") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") -#@ load("@ytt:struct", "struct") -#@ load("@ytt:template", "template") --- #@ def pr_status(context, status, description = None): @@ -51,16 +38,13 @@ try: --- resource_types: #@overlay/append - - name: pull-request - type: docker-image - source: - repository: teliaoss/github-pr-resource + - #@ resource_type("github-pr-resource", "teliaoss/github-pr-resource") resources: #@overlay/match by="name" #@overlay/replace - name: source - type: pull-request + type: github-pr-resource check_every: 1m icon: github source: diff --git a/ci/release/pipeline.yml 
b/ci/release/pipeline.yml index 033b428fbf..b37307022f 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -16,25 +16,18 @@ #@ load("templates.lib.yml", #@ "git_resource", #@ "gci_resource", -#@ "image_family_name", #@ "registry_image_resource", #@ "build_job_name", #@ "packer_job", #@ "packer_job_name", #@ "docker_job", #@ "docker_job_name", -#@ "update_pipeline_job", -#@ "update_pipeline_job_name", -#@ "build_resources", #@ "package_resource", -#@ "package_resource_name", #@ "put_package", #@ "version_source_job", #@ "version_source_job_name") #@ load("@ytt:data", "data") #@ load("@ytt:overlay", "overlay") -#@ load("@ytt:struct", "struct") -#@ load("@ytt:template", "template") #@overlay/match by=overlay.all --- From 19ecdff57852e17b6fd84dad9560504c8451a91e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 23:39:15 -0800 Subject: [PATCH 094/155] Stubs out GitHub Release uploading. --- ci/release/pipeline.yml | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index b37307022f..cf6e67b850 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -23,6 +23,7 @@ #@ "docker_job", #@ "docker_job_name", #@ "package_resource", +#@ "package_resource_name", #@ "put_package", #@ "version_source_job", #@ "version_source_job_name") @@ -74,8 +75,41 @@ jobs: - #@ put_package(build, config) #@ end #@ end + #@overlay/append + - name: github-release + plan: + - in_parallel: + fail_fast: true + steps: + - get: version + passed: + #@ for build in data.values.builds: + #@ for config in data.values.configs: + - #@ build_job_name(build, config) + #@ end + #@ end + trigger: true + - get: source + passed: + #@ for build in data.values.builds: + #@ for config in data.values.configs: + - #@ build_job_name(build, config) + #@ end + #@ end + trigger: true + #@ for build in data.values.builds: + #@ for config in data.values.configs: + #@ for 
package in build.packages: + - get: #@ package_resource_name(build, config, package) + passed: + - #@ build_job_name(build, config) + trigger: true + #@ end + #@ end + #@ end - #@overlay/match-child-defaults missing_ok=True + + #@overlay/match-child-defaults missing_ok=True groups: #@overlay/match by="name" - name: images @@ -94,3 +128,5 @@ groups: - #@ docker_job_name("task-image") #@overlay/append - #@ version_source_job_name() + #@overlay/append + - github-release From fdfc2977bb67bda0710f2651c0030b9626bf0259 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:24:15 -0800 Subject: [PATCH 095/155] Updates README.md --- ci/README.md | 63 +++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 18 deletions(-) diff --git a/ci/README.md b/ci/README.md index d138494879..f3c37ff730 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,10 +1,19 @@ -# Publish Pipeline +# Geode Native Concourse Pipeline +The Geode Native Concourse pipeline is actually two Concourse pipelines. +The primary release pipeline builds the release artifacts for a given branch, like develop, release or support branch. +The secondary pull request (pr) pipeline builds the same artifacts as the release pipeline but for pull requests and without actually releasing or publishing anything. + +The pipeline is fully self updating and can easily be bootstrapped into a properly configured Concourse deployment. +Concourse configuration requires TBD. + +# Pipeline Setup +The pipeline can be installed or reconfigured via the `set-pipelin.sh` shell script. ```console ./set-pipeline.sh --help ``` -## Example -Given the local repository looks like the following. +## Examples +Given the local repository looks like the following: ```console $ git remote get-url origin git@github.com:some-user/geode-native.git @@ -17,29 +26,43 @@ wip/some-branch The most typical usage should require nothing more than the Concourse target, unless that happens to be named "default". 
Everything else has reasonable defaults based on the currently checked out branch. ```console -$ ./set-pipeline.sh --target=test +$ ./set-pipeline.sh --target=some-concourse ``` -Executes `fly` from the path setting pipeline to target `test` for remote repository `git@github.com:some-user/geode-native.git`. -Pipeline name will be `some-user-wip-something` +Executes `fly` from the path setting pipeline to target `some-concourse` for remote repository `git@github.com:some-user/geode-native.git`. +Pipeline names will be `some-user-wip-something` and `some-user-wip-something-pr`. ### Alternative repository URL and fly version Sometimes you will have to support multiple versions of Concourse `fly` or need to fetch sources via https. ```console $ ./set-pipeline.sh \ --fly=/path/to/fly \ - --target=test \ + --target=some-concourse \ --repository=https://github.com/some-user/geode-native.git ``` -Executes fly at `/path/to/fly` setting pipeline to target `test` for remote repository `https://github.com/some-user/geode-native.git`. -Pipeline name will be `some-user-wip-something` +Executes fly at `/path/to/fly` setting pipeline to target `some-concourse` for remote repository `https://github.com/some-user/geode-native.git`. +Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr`. # Pipeline Steps -1. Creates VM instances -2. Waits for VM instance to be accessible -3. Builds and packages -4. Runs all tests -5. If anything fails it downloads the build directory for later analysis -6. 
Deletes the VM instances +## Release +* Detects new version or source +* Build for each platform and configuration + * Creates VM instances + * Waits for VM instance to be accessible + * Builds and packages + * Runs all tests + * If anything fails it downloads the build directory for later analysis + * Deletes the VM instances +* Publishes to GitHub release +## Pull Release (PR) +* Detects new PR +* Build for each platform and configuration + * Creates VM instances + * Waits for VM instance to be accessible + * Builds and packages + * Runs all tests + * If anything fails it downloads the build directory for later analysis + * Deletes the VM instances + * Updates PR status # Details This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `git` and `gcloud`. @@ -51,6 +74,10 @@ This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `gi #TODO ## Concourse Installation -* helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://35.222.132.46:8080 -* kubectl create secret generic gcr-json-key --from-literal "value=$(cat XXX.json)" --namespace=concourse-main -* kubectl create secret generic github-access-token --from-literal "value=XXX" --namespace=concourse-main +* Resolve chicken/egg problem with external API address. + * `helm install concourse concourse/concourse` + * `helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://1.2.3.4:8080` +* Task for getting secrets into k8s. + * `kubectl create secret generic gcr-json-key --from-literal "value=$(cat XXX.json)" --namespace=concourse-main` + * `kubectl create secret generic github-access-token --from-literal "value=XXX" --namespace=concourse-main` +* Use docker locally for initial pipeline deployment to avoid `gcloud`, `ytt`, and `fly` version issues. 
From 537eaf983a747fbabcf64315683d26643c411b0e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:36:22 -0800 Subject: [PATCH 096/155] Updates README.md --- ci/README.md | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/ci/README.md b/ci/README.md index f3c37ff730..ed3378885e 100644 --- a/ci/README.md +++ b/ci/README.md @@ -4,7 +4,10 @@ The primary release pipeline builds the release artifacts for a given branch, li The secondary pull request (pr) pipeline builds the same artifacts as the release pipeline but for pull requests and without actually releasing or publishing anything. The pipeline is fully self updating and can easily be bootstrapped into a properly configured Concourse deployment. -Concourse configuration requires TBD. +Concourse configuration requires TBD. Changes to the `ci` source directory will results in auto updates to the pipelines. + +Because Concourse workers aren't available on all platforms and have issues with resource sharing this pipeline utilizes external builders. +These builders are currently Google Compute VMs that are launched on demand for each build. # Pipeline Setup The pipeline can be installed or reconfigured via the `set-pipelin.sh` shell script. 
@@ -52,7 +55,9 @@ Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr * Runs all tests * If anything fails it downloads the build directory for later analysis * Deletes the VM instances -* Publishes to GitHub release + * Uploads artifacts to GCS +* Publishes to GitHub release (TODO) +* Detects changes to pipeline sources and auto updates ## Pull Release (PR) * Detects new PR * Build for each platform and configuration @@ -63,15 +68,24 @@ Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr * If anything fails it downloads the build directory for later analysis * Deletes the VM instances * Updates PR status +* Detects changes to pipeline sources and auto updates # Details This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `git` and `gcloud`. ## Dependencies +* [Google Cloud](https://console.cloud.google.com) * [Concourse](https://concourse-ci.org) v6.5.0+ * [`ytt`](https://get-ytt.io) v0.28.0+ * [`git`](https://git-scm.com) v2.25.2+ * [`gcloud`](https://cloud.google.com/sdk/docs/install) SDK +## Layout +* base - Defines all common tasks across both pipelines. +* release - Defines tasks tasks for release pipeline only. +* pr - Defines tasks for pr pipeline only. +* lib - ytt functions used by all templates. +* docker/task - Minimal image required to communicate with builders. + #TODO ## Concourse Installation * Resolve chicken/egg problem with external API address. From a0562d125f73813ec14d245a70124d3e8de981c7 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:39:02 -0800 Subject: [PATCH 097/155] Retry packer builds if they fail. 
--- ci/lib/templates.lib.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 27b8d3a74b..100d63aa30 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -429,6 +429,7 @@ plan: - get: packer-source trigger: false - task: build + attemps: 5 image: packer-image config: platform: linux From b6e2465e0ac29bec8b91da96d05baaf2d51ce1bb Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:41:24 -0800 Subject: [PATCH 098/155] Don't rebuild on image changes. --- ci/lib/templates.lib.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 100d63aa30..4fa2e6ac3f 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -383,7 +383,6 @@ plan: steps: - get: gcloud-image - get: #@ gci_resource_name(image_family_name(build.image_family)) - trigger: true - #@ create_instance(build, config) - do: - #@ build_task(config.config, build.params) From 716379a5b5b199cbb87fca13cb3bda359b752443 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:42:05 -0800 Subject: [PATCH 099/155] Fixes attempts --- ci/lib/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 4fa2e6ac3f..375a7ec42a 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -428,7 +428,7 @@ plan: - get: packer-source trigger: false - task: build - attemps: 5 + attempts: 5 image: packer-image config: platform: linux From 6a73a510a5b3d7921e8387bab0b78a1389f5b2ff Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 11:52:56 -0800 Subject: [PATCH 100/155] Update set-pipeline.sh help. 
--- ci/set-pipeline.sh | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index d901925e01..c0345fb382 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -14,12 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -set -xeuo pipefail +set -euo pipefail function printHelp() { cat << EOF $0 Usage: -Sets Concourse pipeline for Geode Native builds. +Sets Concourse pipelines for Geode Native builds. Options: Parameter Description Default @@ -33,7 +33,18 @@ Parameter Description Default --google-storage-key Google Compute Storage key prefix. Based on pipeline value. --fly Path to fly executable. "fly" --ytt Path to ytt executable. "ytt" +--variants Pipeline variants of publish. Both release and pr. --output Rendered pipeline files directory. Temporary directory. + +Example: +\$ $0 --target=my-target --google-zone=my-zone + +Environment Variables: +All options can be specified via environment variables where hyphens (-) are replaced with underscore (_). + +Example: +\$ target=my-target google_zone=my-zone $0 + EOF } From 0485b6b2da72943b628462f1f5807ba6ee04d030 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 12:30:39 -0800 Subject: [PATCH 101/155] Don't retrigger PRs on new versions. 
--- ci/pr/pipeline.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 41d20a7e23..570ab3f571 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -66,9 +66,10 @@ jobs: fail_fast: true steps: #@overlay/match by=overlay.subset({"get": "source"}) - - get: source - version: every + - version: every on_success: #@ pr_status(name, "pending") + #@overlay/match by=overlay.subset({"get": "version"}) + - trigger: false on_error: in_parallel: steps: From 18416fd5f96ded6dfd4d53d8f3cdb725c8dc26b5 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 16:48:41 -0800 Subject: [PATCH 102/155] Cleanup --- ci/pr/pipeline.yml | 15 ++++++------- ci/release/pipeline.yml | 50 ++++++++++++++++++++--------------------- 2 files changed, 32 insertions(+), 33 deletions(-) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 570ab3f571..7b9067b928 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -58,18 +58,17 @@ jobs: #@ for build in data.values.builds: #@ for config in data.values.configs: #@ name = build_job_name(build, config) - #@overlay/match by="name", missing_ok=True + #@overlay/match by="name" - name: #@ name plan: #@overlay/match by=overlay.index(0) - in_parallel: - fail_fast: true steps: - #@overlay/match by=overlay.subset({"get": "source"}) - - version: every + - #@overlay/match by=overlay.subset({"get": "source"}) + version: every on_success: #@ pr_status(name, "pending") - #@overlay/match by=overlay.subset({"get": "version"}) - - trigger: false + - #@overlay/match by=overlay.subset({"get": "version"}) + trigger: false on_error: in_parallel: steps: @@ -91,5 +90,5 @@ jobs: #@overlay/append - #@ pr_status(name, "success") - #@ end - #@ end + #@ end + #@ end diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index cf6e67b850..92817d384f 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -37,45 +37,45 @@ resources: - #@ 
registry_image_resource("packer-image", "hashicorp/packer") #@overlay/append - #@ git_resource("packer-source", data.values.repository.url, data.values.repository.branch, ["packer/*"]) - #@ for build in data.values.builds: + + #@ for build in data.values.builds: #@overlay/append - #@ gci_resource(build.source_image_family) - #@ for config in data.values.configs: - #@ for package in build.packages: + #@ for config in data.values.configs: + #@ for package in build.packages: #@overlay/append - #@ package_resource(build, config , package) - #@ end - #@ end - #@ end + #@ end + #@ end + #@ end +#@overlay/match-child-defaults missing_ok=True jobs: #@overlay/append - #@ version_source_job() #@overlay/append - #@ docker_job("task-image", "ci-source", "ci/docker/task") - #@ for build in data.values.builds: + + #@ for build in data.values.builds: #@overlay/append - #@ packer_job(build) - #@ for config in data.values.configs: - #@overlay/match by="name", missing_ok=True + #@ for config in data.values.configs: + #@overlay/match by="name" - name: #@ build_job_name(build, config) plan: #@overlay/match by=overlay.index(0) - in_parallel: steps: - #@overlay/match by=overlay.subset({"get": "version"}) - - get: version - #@overlay/match missing_ok=True + - #@overlay/match by=overlay.subset({"get": "version"}) passed: [ version-source ] - #@overlay/match by=overlay.subset({"get": "source"}) - - get: source - #@overlay/match missing_ok=True + - #@overlay/match by=overlay.subset({"get": "source"}) passed: [ version-source ] #@overlay/append - #@ put_package(build, config) - #@ end - #@ end - #@overlay/append + #@ end + #@ end + + #@overlay/append - name: github-release plan: - in_parallel: @@ -86,7 +86,7 @@ jobs: #@ for build in data.values.builds: #@ for config in data.values.configs: - #@ build_job_name(build, config) - #@ end + #@ end #@ end trigger: true - get: source @@ -94,7 +94,7 @@ jobs: #@ for build in data.values.builds: #@ for config in data.values.configs: - #@ 
build_job_name(build, config) - #@ end + #@ end #@ end trigger: true #@ for build in data.values.builds: @@ -104,12 +104,12 @@ jobs: passed: - #@ build_job_name(build, config) trigger: true - #@ end - #@ end - #@ end + #@ end + #@ end + #@ end - #@overlay/match-child-defaults missing_ok=True +#@overlay/match-child-defaults missing_ok=True groups: #@overlay/match by="name" - name: images @@ -123,7 +123,7 @@ groups: #@ for build in data.values.builds: #@overlay/append - #@ packer_job_name(build) - #@ end + #@ end #@overlay/append - #@ docker_job_name("task-image") #@overlay/append From 44d5acb947823bc703d6a7b5ab61b450e8d3acad Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 3 Dec 2020 16:56:45 -0800 Subject: [PATCH 103/155] Ignore some paths on PRs. --- ci/pr/pipeline.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ci/pr/pipeline.yml b/ci/pr/pipeline.yml index 7b9067b928..406a52318c 100644 --- a/ci/pr/pipeline.yml +++ b/ci/pr/pipeline.yml @@ -52,6 +52,12 @@ resources: repository: #@ parts[1] + "/" + parts[2] access_token: ((github-access-token)) base_branch: #@ data.values.repository.branch + ignore_paths: + - ci/ + - packer/ + - docker/ + - tools/ + - '*/*.md' #@overlay/match-child-defaults missing_ok=True jobs: From bd6d39e617fd1e7acdd311899b814ef38a8054e2 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 18 Dec 2020 22:51:21 -0800 Subject: [PATCH 104/155] Explicitly set MSVC toolset and Windows SDK. 
--- ci/base/base.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/base/base.yml b/ci/base/base.yml index 6f6dfacef3..42b12d4178 100644 --- a/ci/base/base.yml +++ b/ci/base/base.yml @@ -56,7 +56,7 @@ builds: with_dot_net: #@ True #@yaml/map-key-override params: - CMAKE_CONFIGURE_FLAGS: "-A x64 -Thost=x64" + CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.15,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" CMAKE_BUILD_FLAGS: "/m" CPACK_GENERATORS: "ZIP" From 479d93f8b5f11d507deeff77625e7e26a200239b Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 18 Dec 2020 22:51:38 -0800 Subject: [PATCH 105/155] Add VS 2019 image. --- ci/base/base.yml | 11 ++ packer/build-windows-2019-vs-2019.json | 157 +++++++++++++++++++ packer/windows/install-vs-2019-community.ps1 | 46 ++++++ 3 files changed, 214 insertions(+) create mode 100644 packer/build-windows-2019-vs-2019.json create mode 100644 packer/windows/install-vs-2019-community.ps1 diff --git a/ci/base/base.yml b/ci/base/base.yml index 42b12d4178..4fb335292c 100644 --- a/ci/base/base.yml +++ b/ci/base/base.yml @@ -60,6 +60,17 @@ builds: CMAKE_BUILD_FLAGS: "/m" CPACK_GENERATORS: "ZIP" + - _: #@ template.replace(new_build("windows-vs-2019", "Windows-64bit.zip")) + image_family: build-windows-2019-vs-2019 + source_image_family: windows-2019 + #@yaml/map-key-override + with_dot_net: #@ True + #@yaml/map-key-override + params: + CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.16,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" + CMAKE_BUILD_FLAGS: "/m" + CPACK_GENERATORS: "ZIP" + - _: #@ template.replace(new_build("rhel-7")) image_family: build-rhel-7 source_image_family: rhel-7 diff --git a/packer/build-windows-2019-vs-2019.json b/packer/build-windows-2019-vs-2019.json new file mode 100644 index 0000000000..0dc7617356 --- /dev/null +++ b/packer/build-windows-2019-vs-2019.json @@ -0,0 +1,157 @@ +{ + "variables": { + "aws_region": "", + "googlecompute_zone": "", + "googlecompute_project": "", + "image_family": 
"build-windows-2019-vs-2017", + "image_name_prefix": "{{user `image_family`}}" + }, + "builders": [ + { + "type": "amazon-ebs", + "instance_type": "c5d.2xlarge", + "ami_virtualization_type": "hvm", + "ami_name": "{{user `image_family`}}-{{timestamp}}", + "region": "{{user `aws_region`}}", + "source_ami_filter": { + "filters": { + "virtualization-type": "hvm", + "name": "Windows_Server-2019-English-Full-Base-*", + "root-device-type": "ebs" + }, + "owners": [ + "amazon" + ], + "most_recent": true + }, + "subnet_id": "{{user `subnet_id`}}", + "vpc_id": "{{user `vpc_id`}}", + "tags": { + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" + }, + "launch_block_device_mappings": [ + { + "device_name": "/dev/sda1", + "delete_on_termination": true, + "volume_size": 100 + } + ], + "user_data_file": "windows/2019/aws/startup.ps1", + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true + }, + { + "type": "googlecompute", + "machine_type": "n1-standard-2", + "project_id": "{{user `googlecompute_project`}}", + "zone": "{{user `googlecompute_zone`}}", + "source_image_family": "windows-2019", + "image_name": "{{user `image_name_prefix`}}-{{timestamp}}", + "image_family": "{{user `image_family`}}", + "image_labels": { + "owner": "{{user `owner`}}", + "project": "{{user `project`}}", + "branch": "{{user `branch`}}" + }, + "disk_size": "100", + "metadata": { + "windows-startup-script-cmd": "dism /Online /NoRestart /Add-Capability /CapabilityName:OpenSSH.Server~~~~0.0.1.0 & dism /Online /NoRestart /Enable-Feature /All /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" + }, + "communicator": "winrm", + "winrm_username": "Administrator", + "winrm_insecure": true, + "winrm_use_ssl": true + } + ], + "provisioners": [ + { + "type": "powershell", + "scripts": [ + "windows/disable-uac.ps1", + 
"windows/2019/install-ssh.ps1", + "windows/install-chocolatey.ps1" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-vs-2019-community.ps1" + ] + }, + { + "type": "powershell", + "inline": [ + "choco install git.install -confirm", + "choco install cmake.portable -confirm", + "# TODO Old CLI tests aren't compatible with Java 11", + "choco install liberica8jdk -confirm", + "choco install doxygen.install -confirm", + "choco install openssl -confirm", + "choco install strawberryperl -confirm", + "choco install nuget.commandline -confirm", + "# TODO make this a nuget dependency", + "choco install nunit.install --version 2.6.4 -confirm" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/install-geode.ps1" + ] + }, + { + "type": "file", + "source": "windows/aws/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", + "only": [ + "amazon-ebs" + ] + }, + { + "type": "file", + "source": "windows/google/init-ssh.ps1", + "destination": "$Env:ProgramData/ssh/init-ssh.ps1", + "only": [ + "googlecompute" + ] + }, + { + "type": "powershell", + "inline": [ + ". $Env:ProgramData\\ssh\\init-ssh.ps1 -schedule" + ] + }, + { + "type": "powershell", + "scripts": [ + "windows/aws/setup-ec2launch.ps1" + ], + "only": [ + "amazon-ebs" + ] + }, + { + "type": "windows-restart", + "restart_timeout": "30m" + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" + ] + }, + { + "type": "powershell", + "inline": [ + "GCESysprep -NoShutdown" + ], + "only": [ + "googlecompute" + ] + } + ] +} diff --git a/packer/windows/install-vs-2019-community.ps1 b/packer/windows/install-vs-2019-community.ps1 new file mode 100644 index 0000000000..e860abcd8d --- /dev/null +++ b/packer/windows/install-vs-2019-community.ps1 @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# TODO AdminDeploy.xml +# vs_community.exe /AdminFile C:\Users\Administrator\AdminDeployment.xml /Log setup.log /Passive + +$ErrorActionPreference = "Stop" + +write-host "Installing Visual Studio 2019 Community..." + +$args = @('--add Microsoft.VisualStudio.Component.CoreEditor', + '--add Microsoft.VisualStudio.Workload.CoreEditor', + '--add Microsoft.Net.Component.4.6.1.TargetingPack', + '--add Microsoft.VisualStudio.Component.Roslyn.Compiler', + '--add Microsoft.Net.Component.4.8.SDK', + '--add Microsoft.Component.MSBuild', + '--add Microsoft.VisualStudio.Component.TextTemplating', + '--add Microsoft.Net.Component.4.5.2.TargetingPack', + '--add Microsoft.VisualStudio.Component.IntelliCode', + '--add Microsoft.VisualStudio.Component.VC.CoreIde', + '--add Microsoft.VisualStudio.Component.VC.Redist.14.Latest', + '--add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core', + '--add Microsoft.VisualStudio.Component.Windows10SDK.16299', + '--add Microsoft.VisualStudio.Component.VC.v141.x86.x64', + '--add Microsoft.VisualStudio.Workload.NativeDesktop', + '--add Microsoft.VisualStudio.Component.VC.v141.CLI.Support', + '--add Microsoft.Net.Component.4.6.1.SDK', + '--quiet') + +choco install visualstudio2019community -confirm --package-parameters "$args" + +write-host "Installed Visual Studio 2019 Community." 
+ +# Avoids reboot error code +Exit 0 From e08f3a9f3abbeb0df94f67dd6ad71e68ea93cfad Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Fri, 18 Dec 2020 23:01:55 -0800 Subject: [PATCH 106/155] Add VS 2019 image. --- ci/base/base.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ci/base/base.yml b/ci/base/base.yml index 4fb335292c..c785cf809b 100644 --- a/ci/base/base.yml +++ b/ci/base/base.yml @@ -49,18 +49,18 @@ google: key: geode-native/develop builds: - - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) - image_family: build-windows-2019-vs-2017 - source_image_family: windows-2019 - #@yaml/map-key-override - with_dot_net: #@ True - #@yaml/map-key-override - params: - CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.15,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" - CMAKE_BUILD_FLAGS: "/m" - CPACK_GENERATORS: "ZIP" +#! - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) +#! image_family: build-windows-2019-vs-2017 +#! source_image_family: windows-2019 +#! #@yaml/map-key-override +#! with_dot_net: #@ True +#! #@yaml/map-key-override +#! params: +#! CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.15,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" +#! CMAKE_BUILD_FLAGS: "/m" +#! CPACK_GENERATORS: "ZIP" - - _: #@ template.replace(new_build("windows-vs-2019", "Windows-64bit.zip")) + - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) image_family: build-windows-2019-vs-2019 source_image_family: windows-2019 #@yaml/map-key-override From 330a1b3a95f181832c3791e6656344617d32cdb7 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 19 Dec 2020 06:50:17 -0800 Subject: [PATCH 107/155] Add VS 2019 image. 
--- packer/windows/install-vs-2019-community.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/packer/windows/install-vs-2019-community.ps1 b/packer/windows/install-vs-2019-community.ps1 index e860abcd8d..460dfd143f 100644 --- a/packer/windows/install-vs-2019-community.ps1 +++ b/packer/windows/install-vs-2019-community.ps1 @@ -31,6 +31,7 @@ $args = @('--add Microsoft.VisualStudio.Component.CoreEditor', '--add Microsoft.VisualStudio.Component.VC.CoreIde', '--add Microsoft.VisualStudio.Component.VC.Redist.14.Latest', '--add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core', + '--add Microsoft.VisualStudio.Component.VC.ATL', '--add Microsoft.VisualStudio.Component.Windows10SDK.16299', '--add Microsoft.VisualStudio.Component.VC.v141.x86.x64', '--add Microsoft.VisualStudio.Workload.NativeDesktop', From 858e08998878595160a59421641846fb452d70be Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 19 Dec 2020 06:50:30 -0800 Subject: [PATCH 108/155] Debug build script --- ci/lib/templates.lib.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 375a7ec42a..7b74d15cd6 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -215,7 +215,7 @@ config: - -c #@yaml/text-templated-strings - | - set -ueo pipefail + set -xueo pipefail (@= remote_functions() @) From f05212b93a6423e427760dd1c3f2b7f02b25a25a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 15:46:43 +0000 Subject: [PATCH 109/155] Update to boost 1.75.0 and fix MSVC --- dependencies/boost/CMakeLists.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dependencies/boost/CMakeLists.txt b/dependencies/boost/CMakeLists.txt index e0e1bb9436..dd79b07f52 100644 --- a/dependencies/boost/CMakeLists.txt +++ b/dependencies/boost/CMakeLists.txt @@ -57,9 +57,10 @@ endif() if ("SunOS" STREQUAL ${CMAKE_SYSTEM_NAME}) list(APPEND BOOTSTRAP_FLAGS --with-toolset=sun) list(APPEND B2_FLAGS 
define=BOOST_OPTIONAL_DETAIL_NO_RVALUE_REFERENCES) -elseif ("Windows" STREQUAL ${CMAKE_SYSTEM_NAME}) +elseif(MSVC) + list(APPEND BOOTSTRAP_FLAGS vc${MSVC_TOOLSET_VERSION}) + list(APPEND B2_FLAGS toolset=msvc-${MSVC_TOOLSET_VERSION}) set(CMAKE_STATIC_LIBRARY_PREFIX lib) - list(APPEND B2_FLAGS toolset=msvc-14.1) elseif ("Clang" STREQUAL ${CMAKE_CXX_COMPILER_ID}) list(APPEND BOOTSTRAP_FLAGS --with-toolset=clang) endif() From be29ef0e504153755cd88aeb3dc460a7d389ad1f Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 09:41:10 -0800 Subject: [PATCH 110/155] Remove used windows build scripts. --- bin/vcvarsall.bat | 20 -------------------- bin/vcvarsall.ps1 | 30 ------------------------------ 2 files changed, 50 deletions(-) delete mode 100644 bin/vcvarsall.bat delete mode 100644 bin/vcvarsall.ps1 diff --git a/bin/vcvarsall.bat b/bin/vcvarsall.bat deleted file mode 100644 index df53c10d48..0000000000 --- a/bin/vcvarsall.bat +++ /dev/null @@ -1,20 +0,0 @@ -@echo off - -rem Licensed to the Apache Software Foundation (ASF) under one or more -rem contributor license agreements. See the NOTICE file distributed with -rem this work for additional information regarding copyright ownership. -rem The ASF licenses this file to You under the Apache License, Version 2.0 -rem (the "License"); you may not use this file except in compliance with -rem the License. You may obtain a copy of the License at -rem -rem http://www.apache.org/licenses/LICENSE-2.0 -rem -rem Unless required by applicable law or agreed to in writing, software -rem distributed under the License is distributed on an "AS IS" BASIS, -rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -rem See the License for the specific language governing permissions and -rem limitations under the License. 
- -call "c:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build\vcvarsall.bat" %1 -C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\VC\Auxiliary\Build -echo Environment setup for %vs% %1. diff --git a/bin/vcvarsall.ps1 b/bin/vcvarsall.ps1 deleted file mode 100644 index ce91281faa..0000000000 --- a/bin/vcvarsall.ps1 +++ /dev/null @@ -1,30 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -$arch = $args[0] -if ([string]::IsNullOrEmpty($arch)) { - $arch = "amd64" -} - -$basedir = split-path $SCRIPT:MyInvocation.MyCommand.Path -parent - -cmd /c "$basedir\vcvarsall.bat $arch & set" | -foreach { - if ($_ -match "=") { - $v = $_.split("="); set-item -force -path "ENV:\$($v[0])" -value "$($v[1])" - } -} - -write-host "Environment setup for VS $env:VisualStudioVersion $arch." 
-ForegroundColor Yellow From c3001e981791f3be5481acef4c46917808827aa8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 09:43:33 -0800 Subject: [PATCH 111/155] Fixes Windows 2016 --- packer/build-windows-2016-vs-2017.json | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index 2c5abc8f6c..2f39df4f17 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -134,6 +134,16 @@ "amazon-ebs" ] }, + { + "type": "windows-restart", + "restart_timeout": "30m" + }, + { + "type": "powershell", + "scripts": [ + "windows/cleanup.ps1" + ] + }, { "type": "powershell", "inline": [ @@ -142,12 +152,6 @@ "only": [ "googlecompute" ] - }, - { - "type": "powershell", - "scripts": [ - "windows/cleanup.ps1" - ] } ] } From f01c5fe362e3f28368c6162dad3d077639bd7cf5 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 09:50:54 -0800 Subject: [PATCH 112/155] Pit VS 2017 against VS 2019 --- ci/base/base.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ci/base/base.yml b/ci/base/base.yml index c785cf809b..38cf1c3b54 100644 --- a/ci/base/base.yml +++ b/ci/base/base.yml @@ -49,18 +49,18 @@ google: key: geode-native/develop builds: -#! - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) -#! image_family: build-windows-2019-vs-2017 -#! source_image_family: windows-2019 -#! #@yaml/map-key-override -#! with_dot_net: #@ True -#! #@yaml/map-key-override -#! params: -#! CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.15,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" -#! CMAKE_BUILD_FLAGS: "/m" -#! 
CPACK_GENERATORS: "ZIP" + - _: #@ template.replace(new_build("windows-2016-vs-2017", "Windows-64bit.zip")) + image_family: build-windows-2016-vs-2017 + source_image_family: windows-2016 + #@yaml/map-key-override + with_dot_net: #@ True + #@yaml/map-key-override + params: + CMAKE_CONFIGURE_FLAGS: "-A x64 -Tv141,version=14.16,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0" + CMAKE_BUILD_FLAGS: "/m" + CPACK_GENERATORS: "ZIP" - - _: #@ template.replace(new_build("windows", "Windows-64bit.zip")) + - _: #@ template.replace(new_build("windows-2019-vs-2019", "Windows-64bit.zip")) image_family: build-windows-2019-vs-2019 source_image_family: windows-2019 #@yaml/map-key-override From 1ab4ddd70dcaac9a621457619fa18eec54ef0f30 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 11:09:15 -0800 Subject: [PATCH 113/155] Package icon --- ci/lib/templates.lib.yml | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 7b74d15cd6..c7ed996197 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -573,12 +573,15 @@ plan: --- #@ def package_resource(build, config, package): -name: #@ package_resource_name(build, config, package) -type: gcs-resource -source: - bucket: #@ data.values.google.storage.bucket - json_key: ((gcr-json-key)) - regexp: #@ data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/" + package.regexp +#@ return resource( +#@ package_resource_name(build, config, package), +#@ "gcs-resource", +#@ { +#@ "bucket": data.values.google.storage.bucket, +#@ "json_key": "((gcr-json-key))", +#@ "regexp": data.values.google.storage.key + "/packages/" + build.name + "/" + config.name + "/" + package.regexp +#@ }, +#@ "content-save") #@ end --- From 883836e0a0e78fb1d83407c35c717b72c57c15a0 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 13:14:03 -0800 Subject: [PATCH 114/155] Boost and building cleanup. 
--- BUILDING.md | 99 ++++++++++++++++++++++--------- dependencies/boost/CMakeLists.txt | 19 +++--- 2 files changed, 79 insertions(+), 39 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index db793c5345..63aabd3ff0 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -1,6 +1,7 @@ # Building ## Prerequisites (All Platforms) + * [CMake 3.12](https://cmake.org/) or newer * C++11 compiler *(see platform specific requirements)* * [Doxygen 1.8.11 or greater](https://sourceforge.net/projects/doxygen/) *(for building source documentation)* @@ -17,13 +18,13 @@ * [Mac OS X](#mac-os-x) * [Solaris](#solaris) - ## Setting Path to Geode + Building requires access to an installation of Geode. There are two ways to achieve this: * Set an environment variable called `GEODE_HOME` that points to your Geode installation path. * Pass in `GEODE_ROOT` during the CMake configuration step. - * e.g. add `-DGEODE_ROOT=/path/to/geode` to the _initial_ `cmake` execution command. + * e.g. add `-DGEODE_ROOT=/path/to/geode` to the _initial_ `cmake` execution command. ## Steps to build @@ -36,16 +37,25 @@ $ cmake .. # build step $ cmake --build . -- ``` -If OpenSSL is installed in a custom location, then you must pass `OPENSSL_ROOT_DIR` during the CMake configuration step. For example, `-DOPENSSL_ROOT_DIR=/path/to/openssl`. -To explicitly specify the location in which the Native Client will be installed, add `-DCMAKE_INSTALL_PREFIX=/path/to/installation/destination` to this initial `cmake` execution command. +If OpenSSL is installed in a custom location, then you must pass `OPENSSL_ROOT_DIR` during the CMake configuration step. +For example, `-DOPENSSL_ROOT_DIR=/path/to/openssl`. + +To explicitly specify the location in which the Native Client will be installed, +add `-DCMAKE_INSTALL_PREFIX=/path/to/installation/destination` to this initial `cmake` execution command. -To set the version header on the API docs, specify PRODUCT_VERSION on the configuration command line. 
For example, `-DPRODUCT_VERSION=1.2.3`. +To set the version header on the API docs, specify PRODUCT_VERSION on the configuration command line. For +example, `-DPRODUCT_VERSION=1.2.3`. ### Generator -CMake uses a "generator" to produce configuration files for use by a variety of build tools, e.g., UNIX makefiles, Visual Studio projects. By default a system-specific generator is used by CMake during configuration. (Please see [the CMake documentation](https://cmake.org/documentation/) for further information.) However, in many cases there is a better choice. + +CMake uses a "generator" to produce configuration files for use by a variety of build tools, e.g., UNIX makefiles, +Visual Studio projects. By default a system-specific generator is used by CMake during configuration. (Please +see [the CMake documentation](https://cmake.org/documentation/) for further information.) However, in many cases there +is a better choice. #### CLion / Eclipse / Other + The recommended generator for most unix platforms is 'Makefiles' (default): ```console @@ -55,10 +65,12 @@ $ cmake .. #### Mac OSX Xcode Install XCode from the App Store + * You have to run XCode once to get it initialize properly (software agreement). * Install the command line tools for xcode - run `xcode-select --install` from terminal -Install the required dependencies through homebrew. If you use another package manager for your mac feel free to use that. +Install the required dependencies through homebrew. If you use another package manager for your mac feel free to use +that. ```bash $ brew install geode @@ -67,35 +79,33 @@ $ brew install doxygen $ brew install cmake ``` -Follow these steps to build the geode native client. The recommended code generator is `Xcode`. +You will need to provide the path to the brew installed OpenSSL headers since macOS already has a system installed +version but without the required headers. ```bash -$ cd -$ mkdir build -$ cd build -$ cmake .. 
-G "Xcode" -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DCMAKE_INSTALL_PREFIX=`pwd`/install -$ cmake --build . --target docs -$ cmake --build . --target install -j8 +$ cmake .. -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl ``` -At the end of the process the geode native client will be in the `/build/install` directory. - #### Windows / Visual Studio -When running cmake commands on Windows, be sure to use [Visual Studio Native Tools Command Prompt](https://msdn.microsoft.com/en-us/library/f35ctcxw.aspx) so environment variables are set properly. -The recommended generator on Windows is `Visual Studio 15 2017 Win64`: +For Visual Studio 2017 and newer you only need to specify the correct architecture, toolset and SDK for Windows. To +build a 64-bit library using the 64 bit toolset version 14.1 with minimum ABI compatibility of 14.16 for minimum Windows +version 10.0.16299.0 use the following options. ```console -$ cmake .. -G "Visual Studio 15 2017 Win64" -Thost=x64 +$ cmake .. -A x64 -Tv141,version=14.16,host=x64 -DCMAKE_SYSTEM_VERSION=10.0.16299.0 ``` -Visual Studio 2019 is also supported. For this generator you must leave off the Win64: +At a bare minimum you will likely need to specify the architecture, since MSVC still defaults to 32 bit, and the 64 bit +version of the toolset, because of large object files. The latest toolset version and Windows SDK will likely get picked +up. ```console -$ cmake .. -G "Visual Studio 16 2019" -Thost=x64 +$ cmake .. -A x64 -Thost=x64 ``` ### Build Parallelism + For faster builds, use optional parallelism parameters in the last build step: #### Unix @@ -122,12 +132,16 @@ $ cmake … -DWITH_IPV6=ON … #### Code Coverage -If building with GCC or Clang you can enable C++ code coverage by adding `-DUSE_CPP_COVERAGE=ON` to the CMake [Generator](#generator) command. +If building with GCC or Clang you can enable C++ code coverage by adding `-DUSE_CPP_COVERAGE=ON` to the +CMake [Generator](#generator) command. 
```console $ cmake … -DUSE_CPP_COVERAGE=ON … ``` -You can then generate a C++ code coverage report by downloading [lcov](http://ltp.sourceforge.net/coverage/lcov.php). After acquiring lcov, finish the [Steps to build](#Steps-to-build) section above. Then, run the tests as described in the [CONTRIBUTING.md](CONTRIBUTING.md). Finally, run the following commands from the `build` directory: + +You can then generate a C++ code coverage report by downloading [lcov](http://ltp.sourceforge.net/coverage/lcov.php). +After acquiring lcov, finish the [Steps to build](#Steps-to-build) section above. Then, run the tests as described in +the [CONTRIBUTING.md](CONTRIBUTING.md). Finally, run the following commands from the `build` directory: ```console $ lcov --capture --directory . --output-file coverage.info @@ -137,24 +151,34 @@ $ genhtml coverage.info --output-directory coverage_report You can then open the `index.html` file in the `coverage_report` directory using any browser. #### Clang-Tidy + To enable `clang-tidy`: ```console $ cmake … -DCMAKE_CXX_CLANG_TIDY=clang-tidy … ``` + To use specific `clang-tidy`: ```console $ cmake … -DCMAKE_CXX_CLANG_TIDY=/path/to/clang-tidy … ``` + By default `clang-tidy` uses the configuration found in `.clang-tidy` To override `clang-tidy` options: ```console $ cmake … -DCMAKE_CXX_CLANG_TIDY=clang-tidy; … ``` + #### Clang-format -Individual targets in the build tree have their own dependency of the form `<>-clangformat`, which uses the `clang-format` executable, wherever it is found, to format and modified files according to the rules specfied in the .clang-format file. This is helpful when submitting changes to geode-native, because an improperly formatted file will fail Travis-CI and have to be fixed prior to merging any pull request. If clang-format is not installed on your system, clangformat targets will not be added to your project files, and geode-native should build normally. 
Under some circumstances, however, it may become necessary to disable `clang-format` on a system where it _is_ installed. + +Individual targets in the build tree have their own dependency of the form `<>-clangformat`, which uses +the `clang-format` executable, wherever it is found, to format and modified files according to the rules specfied in the +.clang-format file. This is helpful when submitting changes to geode-native, because an improperly formatted file will +fail Travis-CI and have to be fixed prior to merging any pull request. If clang-format is not installed on your system, +clangformat targets will not be added to your project files, and geode-native should build normally. Under some +circumstances, however, it may become necessary to disable `clang-format` on a system where it _is_ installed. To disable `clang-format` in the build: @@ -162,7 +186,10 @@ To disable `clang-format` in the build: $ cmake … -DClangFormat_EXECUTABLE='' … ``` -On the other hand, it may also be desirable to run clang-format on the entire source tree. This is also easily done via the `all-clangformat` _in a build with clang-format enabled_. If clang-format has been disabled in the cmake configuration step, as above, the `all-clangformat` target will not exist, and the cmake configuration step will have to be re-run with clang-format enabled. +On the other hand, it may also be desirable to run clang-format on the entire source tree. This is also easily done via +the `all-clangformat` _in a build with clang-format enabled_. If clang-format has been disabled in the cmake +configuration step, as above, the `all-clangformat` target will not exist, and the cmake configuration step will have to +be re-run with clang-format enabled. To run clang-format on the entire source tree: @@ -170,12 +197,15 @@ To run clang-format on the entire source tree: $ cmake --build . 
--target all-clangformat ``` - ## Installing -By default a system-specific location is used by CMake as the destination of the `install` target, e.g., `/usr/local` on UNIX system. To explicitly specify the location in which the Native Client will be installed, add `-DCMAKE_INSTALL_PREFIX=/path/to/installation/destination` to the _initial_ `cmake` execution command. -**Note:** For consistent results, avoid using the "~" (tilde) abbreviation when specifying paths on the CMake command line. -Interpretation of the symbol varies depending on the option being specified, and on the system or command shell in use. +By default a system-specific location is used by CMake as the destination of the `install` target, e.g., `/usr/local` on +UNIX system. To explicitly specify the location in which the Native Client will be installed, +add `-DCMAKE_INSTALL_PREFIX=/path/to/installation/destination` to the _initial_ `cmake` execution command. + +**Note:** For consistent results, avoid using the "~" (tilde) abbreviation when specifying paths on the CMake command +line. Interpretation of the symbol varies depending on the option being specified, and on the system or command shell in +use. Due to limitations in CMake, the documentation must be built as a separate step before installation: @@ -189,6 +219,7 @@ $ cmake --build . --target install # Platform-Specific Prerequisites ## Windows + * Windows 8.1 64-bit * Windows 10 64-bit * Windows Server 2012 R2 64-bit @@ -196,42 +227,52 @@ $ cmake --build . 
--target install * NUnit 2.6.4 (to run clicache tests) ### Required Tools + * [Visual Studio 2015](https://www.visualstudio.com) or newer * .NET 4.5.2 or later * Chocolatey * [Other dependencies installed via Powershell](packer/windows/install-dependencies.ps1) ## Linux + * RHEL/CentOS 6 * RHEL/CentOS 7 * SLES 11 * SLES 12 ### Required Tools + * [GCC 5](https://gcc.gnu.org) or newer ### Optional Tools + * [Eclipse CDT 8.8](https://eclipse.org/cdt/) or newer ## Mac OS X + * Mac OS X 10.12 (Sierra) or newer * Xcode 8.2 or newer ### Required Tools + * [Xcode](https://developer.apple.com/xcode/download/) * Xcode command line developer tools + ```console $ xcode-select --install ``` ### Optional Tools + * [CMake GUI](https://cmake.org/) * [Doxygen GUI](http://ftp.stack.nl/pub/users/dimitri/Doxygen-1.8.11.dmg) * [CLion](https://www.jetbrains.com/clion/) ## Solaris + * Solaris 11 SPARC * Solaris 11 x86 ### Required Tools + * [Solaris Studio 12.6](http://www.oracle.com/technetwork/server-storage/developerstudio/downloads/index.html) or newer diff --git a/dependencies/boost/CMakeLists.txt b/dependencies/boost/CMakeLists.txt index dd79b07f52..38f8196045 100644 --- a/dependencies/boost/CMakeLists.txt +++ b/dependencies/boost/CMakeLists.txt @@ -13,17 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-project( boost VERSION 1.75.0 LANGUAGES NONE ) +project(boost VERSION 1.75.0 LANGUAGES NONE) -set( SHA256 aeb26f80e80945e82ee93e5939baebdca47b9dee80a07d3144be1e1a6a66dd6a ) +set(SHA256 aeb26f80e80945e82ee93e5939baebdca47b9dee80a07d3144be1e1a6a66dd6a) if (WIN32) - set(BOOTSTRAP_COMMAND .\\bootstrap.bat) - set(B2_COMMAND .\\b2) + set(BOOTSTRAP_COMMAND ./bootstrap.bat) else() set(BOOTSTRAP_COMMAND ./bootstrap.sh) - set(B2_COMMAND ./b2) endif() +set(B2_COMMAND ./b2) set(B2_FLAGS -d0 --prefix=/$ @@ -59,7 +58,7 @@ if ("SunOS" STREQUAL ${CMAKE_SYSTEM_NAME}) list(APPEND B2_FLAGS define=BOOST_OPTIONAL_DETAIL_NO_RVALUE_REFERENCES) elseif(MSVC) list(APPEND BOOTSTRAP_FLAGS vc${MSVC_TOOLSET_VERSION}) - list(APPEND B2_FLAGS toolset=msvc-${MSVC_TOOLSET_VERSION}) + list(APPEND B2_FLAGS toolset=msvc-${CMAKE_VS_PLATFORM_TOOLSET_VERSION}) set(CMAKE_STATIC_LIBRARY_PREFIX lib) elseif ("Clang" STREQUAL ${CMAKE_CXX_COMPILER_ID}) list(APPEND BOOTSTRAP_FLAGS --with-toolset=clang) @@ -68,10 +67,10 @@ endif() list(APPEND B2_FLAGS "cxxflags=${CMAKE_CXX_FLAGS} ${CMAKE_CXX11_STANDARD_COMPILE_OPTION} ${CMAKE_CXX_COMPILE_OPTIONS_PIC}") string(REPLACE "." 
"_" _VERSION_UNDERSCORE ${PROJECT_VERSION}) -set( EXTERN ${PROJECT_NAME}-extern ) +set(EXTERN ${PROJECT_NAME}-extern) include(ExternalProject) -ExternalProject_Add( ${EXTERN} +ExternalProject_Add(${EXTERN} URL "https://dl.bintray.com/boostorg/release/${PROJECT_VERSION}/source/boost_${_VERSION_UNDERSCORE}.tar.gz" "https://sourceforge.net/projects/boost/files/boost/${PROJECT_VERSION}/boost_${_VERSION_UNDERSCORE}.tar.gz/download" URL_HASH SHA256=${SHA256} @@ -86,8 +85,8 @@ ExternalProject_Add( ${EXTERN} STAMP_DIR ./stamp ) -ExternalProject_Get_Property( ${EXTERN} SOURCE_DIR ) -ExternalProject_Get_Property( ${EXTERN} INSTALL_DIR ) +ExternalProject_Get_Property(${EXTERN} SOURCE_DIR) +ExternalProject_Get_Property(${EXTERN} INSTALL_DIR) set(INSTALL_DIR "${INSTALL_DIR}/$") function(ADD_BOOST_LIBRARY) From cc179e862b85136e1285f9f5ce7c6f8b5de6264e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 13:36:35 -0800 Subject: [PATCH 115/155] Don't checkout sources after upload. --- ci/lib/templates.lib.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index c7ed996197..61ea4e8605 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -228,7 +228,6 @@ config: builddate=$(date "+%Y-%m-%d") remote_upload source . 
- remote_shell cmake -E chdir source git checkout ${git_rev} remote_shell cmake -E make_directory build remote_shell cmake -E chdir build cmake ../source ${CMAKE_CONFIGURE_FLAGS} \ -DCMAKE_BUILD_TYPE=${CMAKE_CONFIG} \ From 7c0cd7fad0b989e108bd528cf5859b7d6f5cf026 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 15:41:12 -0800 Subject: [PATCH 116/155] Fixes for ACE --- cppcache/CMakeLists.txt | 1 - dependencies/ACE/CMakeLists.txt | 69 ++++++++++++--------------------- 2 files changed, 25 insertions(+), 45 deletions(-) diff --git a/cppcache/CMakeLists.txt b/cppcache/CMakeLists.txt index 1c114874b5..dad7bb50bc 100644 --- a/cppcache/CMakeLists.txt +++ b/cppcache/CMakeLists.txt @@ -88,7 +88,6 @@ endif() target_link_libraries(_apache-geode INTERFACE ACE::ACE - ACE::ACE_SSL Boost::boost Boost::filesystem Boost::thread diff --git a/dependencies/ACE/CMakeLists.txt b/dependencies/ACE/CMakeLists.txt index 08b4518ae1..fcedfb40b7 100644 --- a/dependencies/ACE/CMakeLists.txt +++ b/dependencies/ACE/CMakeLists.txt @@ -47,8 +47,7 @@ if (NOT DEFINED ACE_PLATFORM) message( FATAL_ERROR "ACE_PLATFORM unset for ${CMAKE_SYSTEM_NAME}" ) endif() -find_package(OpenSSL REQUIRED) -set( OPENSSL_ROOT ${OPENSSL_INCLUDE_DIR}/.. 
) +set(ENV_COMMAND ${CMAKE_COMMAND} -E env ACE_ROOT=) if (${WIN32}) if (64 EQUAL ${BUILD_BITS}) @@ -56,51 +55,49 @@ if (${WIN32}) else() set( _PLATFORM win32 ) endif() - if (MSVC_VERSION EQUAL 1900) - set(_TYPE "vc14") - elseif((MSVC_VERSION GREATER_EQUAL 1910) AND (MSVC_VERSION LESS_EQUAL 1919)) - set(_TYPE "vs2017") - else() - set(_TYPE "vs2019") + if (MSVC_TOOLSET_VERSION EQUAL 140) + set(MPC_TYPE "vc14") + elseif(MSVC_TOOLSET_VERSION EQUAL 141) + set(MPC_TYPE "vs2017") + elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 142) + set(MPC_TYPE "vs2019") endif() set ( _COMMAND_PREFIX ${CMAKE_COMMAND} -E chdir ace ) set ( _MSBUILD_FLAGS /m /p:Platform=${_PLATFORM} /p:Configuration=$<$:Debug>$<$>:Release>) - set ( _MSBUILD_FLAGS_STATIC ${_MSBUILD_FLAGS} /t:ace_${_TYPE}_static /t:SSL_${_TYPE}_static ) + set ( _MSBUILD_FLAGS_STATIC ${_MSBUILD_FLAGS} /t:ace_${MPC_TYPE}_static ) include(ProcessorCount) ProcessorCount(_NPROCS) find_package(Perl REQUIRED) - set ( MPC ${PERL_EXECUTABLE} ..\\bin\\mwc.pl ) - set ( MPC_FLAGS -type ${_TYPE} -expand_vars -features ssl=1 -recurse -hierarchy -workers ${_NPROCS} -value_template MultiProcessorCompilation=true -apply_project ) + set ( MPC ${PERL_EXECUTABLE} ../bin/mwc.pl ) + set ( MPC_FLAGS -type ${MPC_TYPE} -recurse -hierarchy -workers ${_NPROCS} -apply_project ) #TODO add custom targets to build in parallel? 
#TODO MPC has flag to replace variables with absolute paths, think we need that for devstudio builds - set ( _CONFIGURE_COMMAND ${_COMMAND_PREFIX} - ${CMAKE_COMMAND} -E env ACE_ROOT= SSL_ROOT=${OPENSSL_ROOT} - ${MPC} ${MPC_FLAGS} -name_modifier "*_${_TYPE}_static" -static - -value_template staticflags+=__ACE_INLINE__ - -value_template staticflags+=ACE_BUILD_DLL - -value_template staticflags+=ACE_AS_STATIC_LIBS - -value_template runtime_library=Multithreaded$<$:Debug>Dll + set ( _CONFIGURE_COMMAND ${MPC} -static ${MPC_FLAGS} + -name_modifier "*_${MPC_TYPE}_static" + -value_template MultiProcessorCompilation=true + -value_template WindowsTargetPlatformVersion=${CMAKE_SYSTEM_VERSION} + -value_template staticflags+=__ACE_INLINE__ + -value_template staticflags+=ACE_BUILD_DLL + -value_template staticflags+=ACE_AS_STATIC_LIBS + -value_template runtime_library=Multithreaded$<$:Debug>Dll ) - set ( _BUILD_COMMAND ${_COMMAND_PREFIX} ${CMAKE_VS_MSBUILD_COMMAND} ACE_${_TYPE}_static.sln ${_MSBUILD_FLAGS} ${_MSBUILD_FLAGS_STATIC} + set ( _BUILD_COMMAND ${CMAKE_VS_MSBUILD_COMMAND} ACE_${MPC_TYPE}_static.sln ${_MSBUILD_FLAGS} ${_MSBUILD_FLAGS_STATIC} ) - set ( _INSTALL_COMMAND ${CMAKE_COMMAND} -E copy_directory lib /lib - COMMAND ${CMAKE_COMMAND} -E copy_directory ace /include/ace + set ( _INSTALL_COMMAND ${CMAKE_COMMAND} -E copy_if_different ../lib /lib + COMMAND ${CMAKE_COMMAND} -E copy_if_different ../ace /include/ace ) set(CMAKE_STATIC_LIBRARY_SUFFIX s$<${MSVC}:$<$:d>>.lib) else() # TODO Configure trips up without MAKE - # TODO look into using cmake -E chdir - # TODO use cmake -E env - - set( _MAKE cd ace && ACE_ROOT= SSL_ROOT=${OPENSSL_ROOT} $(MAKE) SHELL=/bin/bash debug=$ optimize=$> buildbits=${BUILD_BITS} c++11=1 static_libs_only=1 ssl=1 ${_MAKE_ARGS}) + set( _MAKE $(MAKE) SHELL=/bin/bash debug=$ optimize=$> buildbits=${BUILD_BITS} c++11=1 static_libs_only=1 ${_MAKE_ARGS}) set ( _CONFIGURE_COMMAND "" ) set ( _BUILD_COMMAND ${_MAKE} all ) set ( _INSTALL_COMMAND ${_MAKE} 
install ) @@ -114,10 +111,11 @@ ExternalProject_Add( ${EXTERN} URL "https://github.com/DOCGroup/ACE_TAO/releases/download/ACE%2BTAO-${_VERSION_UNDERSCORE}/ACE-${PROJECT_VERSION}.tar.gz" URL_HASH SHA256=${SHA256} UPDATE_COMMAND "" + SOURCE_SUBDIR "ace" BUILD_IN_SOURCE 1 CONFIGURE_COMMAND "${_CONFIGURE_COMMAND}" - BUILD_COMMAND "${_BUILD_COMMAND}" - INSTALL_COMMAND "${_INSTALL_COMMAND}" + BUILD_COMMAND ${ENV_COMMAND} ${_BUILD_COMMAND} + INSTALL_COMMAND ${ENV_COMMAND} ${_INSTALL_COMMAND} DEPENDS ${DEPENDS} ) @@ -201,21 +199,4 @@ endif() add_dependencies(ACE_ACE ${EXTERN}) -add_library(ACE_ACE_SSL INTERFACE) -target_link_libraries(ACE_ACE_SSL INTERFACE - ${INSTALL_DIR}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}ACE_SSL${CMAKE_STATIC_LIBRARY_SUFFIX} - ACE_ACE - OpenSSL::SSL - OpenSSL::Crypto -) - -list(LENGTH CMAKE_OSX_ARCHITECTURES len) -if (len GREATER 1) - target_link_options(ACE_ACE_SSL INTERFACE - -undefined dynamic_lookup # remove after OpenSSL goes universal - ) -endif() -unset(len) - add_library(ACE::ACE ALIAS ACE_ACE) -add_library(ACE::ACE_SSL ALIAS ACE_ACE_SSL) From 6e8267593e3d6350301e2fcb3774909a723cfa35 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 21 Dec 2020 16:46:10 -0800 Subject: [PATCH 117/155] Fixes for ACE --- dependencies/ACE/CMakeLists.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dependencies/ACE/CMakeLists.txt b/dependencies/ACE/CMakeLists.txt index fcedfb40b7..226b7e451e 100644 --- a/dependencies/ACE/CMakeLists.txt +++ b/dependencies/ACE/CMakeLists.txt @@ -89,8 +89,8 @@ if (${WIN32}) ) set ( _BUILD_COMMAND ${CMAKE_VS_MSBUILD_COMMAND} ACE_${MPC_TYPE}_static.sln ${_MSBUILD_FLAGS} ${_MSBUILD_FLAGS_STATIC} ) - set ( _INSTALL_COMMAND ${CMAKE_COMMAND} -E copy_if_different ../lib /lib - COMMAND ${CMAKE_COMMAND} -E copy_if_different ../ace /include/ace + set ( _INSTALL_COMMAND ${CMAKE_COMMAND} -E copy_directory /lib /lib + COMMAND ${CMAKE_COMMAND} -E copy_directory /ace /include/ace ) set(CMAKE_STATIC_LIBRARY_SUFFIX 
s$<${MSVC}:$<$:d>>.lib) From eeaa173b3a439fd14c543a5c331e3b20572be113 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 22 Dec 2020 10:33:38 -0800 Subject: [PATCH 118/155] Fixes boost on windows --- dependencies/boost/CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dependencies/boost/CMakeLists.txt b/dependencies/boost/CMakeLists.txt index 38f8196045..11f20b57a1 100644 --- a/dependencies/boost/CMakeLists.txt +++ b/dependencies/boost/CMakeLists.txt @@ -58,7 +58,11 @@ if ("SunOS" STREQUAL ${CMAKE_SYSTEM_NAME}) list(APPEND B2_FLAGS define=BOOST_OPTIONAL_DETAIL_NO_RVALUE_REFERENCES) elseif(MSVC) list(APPEND BOOTSTRAP_FLAGS vc${MSVC_TOOLSET_VERSION}) - list(APPEND B2_FLAGS toolset=msvc-${CMAKE_VS_PLATFORM_TOOLSET_VERSION}) + if (CMAKE_VS_PLATFORM_TOOLSET_VERSION) + list(APPEND B2_FLAGS toolset=msvc-${CMAKE_VS_PLATFORM_TOOLSET_VERSION}) + else() + list(APPEND B2_FLAGS toolset=msvc-${MSVC_TOOLSET_VERSION}) + endif() set(CMAKE_STATIC_LIBRARY_PREFIX lib) elseif ("Clang" STREQUAL ${CMAKE_CXX_COMPILER_ID}) list(APPEND BOOTSTRAP_FLAGS --with-toolset=clang) From d1fd8f83b6c2bba2c0f37870931e3fee83f516c9 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 22 Dec 2020 14:58:28 -0800 Subject: [PATCH 119/155] Fix for .NET 3.5 install on Windows 2016 --- packer/build-windows-2016-vs-2017.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packer/build-windows-2016-vs-2017.json b/packer/build-windows-2016-vs-2017.json index 2f39df4f17..34be5ddb18 100644 --- a/packer/build-windows-2016-vs-2017.json +++ b/packer/build-windows-2016-vs-2017.json @@ -59,7 +59,7 @@ }, "disk_size": "100", "metadata": { - "windows-startup-script-cmd": "dism /Online /NoRestart /Enable-Feature /FeatureName:NetFx3 & winrm quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" + "windows-startup-script-cmd": "dism /Online /NoRestart /Enable-Feature /All /FeatureName:NetFx3 & winrm 
quickconfig -quiet & winrm set winrm/config/service/auth @{Basic=\"true\"} & net user Administrator /active:yes" }, "communicator": "winrm", "winrm_username": "Administrator", From d41dd628f531b432b0378365c9e4bf7151f261ca Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 22 Dec 2020 16:12:21 -0800 Subject: [PATCH 120/155] README for GitHub Private Key --- ci/README.md | 57 +++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 41 insertions(+), 16 deletions(-) diff --git a/ci/README.md b/ci/README.md index ed3378885e..ae79b64385 100644 --- a/ci/README.md +++ b/ci/README.md @@ -1,22 +1,28 @@ # Geode Native Concourse Pipeline -The Geode Native Concourse pipeline is actually two Concourse pipelines. -The primary release pipeline builds the release artifacts for a given branch, like develop, release or support branch. -The secondary pull request (pr) pipeline builds the same artifacts as the release pipeline but for pull requests and without actually releasing or publishing anything. + +The Geode Native Concourse pipeline is actually two Concourse pipelines. The primary release pipeline builds the release +artifacts for a given branch, like develop, release or support branch. The secondary pull request (pr) pipeline builds +the same artifacts as the release pipeline but for pull requests and without actually releasing or publishing anything. The pipeline is fully self updating and can easily be bootstrapped into a properly configured Concourse deployment. -Concourse configuration requires TBD. Changes to the `ci` source directory will results in auto updates to the pipelines. +Concourse configuration requires TBD. Changes to the `ci` source directory will result in auto updates to the +pipelines. -Because Concourse workers aren't available on all platforms and have issues with resource sharing this pipeline utilizes external builders. -These builders are currently Google Compute VMs that are launched on demand for each build.
+Because Concourse workers aren't available on all platforms and have issues with resource sharing this pipeline utilizes +external builders. These builders are currently Google Compute VMs that are launched on demand for each build. # Pipeline Setup + The pipeline can be installed or reconfigured via the `set-pipelin.sh` shell script. + ```console ./set-pipeline.sh --help ``` ## Examples + Given the local repository looks like the following: + ```console $ git remote get-url origin git@github.com:some-user/geode-native.git @@ -26,27 +32,37 @@ wip/some-branch ``` ### Typical + The most typical usage should require nothing more than the Concourse target, unless that happens to be named "default". -Everything else has reasonable defaults based on the currently checked out branch. +Everything else has reasonable defaults based on the currently checked out branch. + ```console $ ./set-pipeline.sh --target=some-concourse ``` -Executes `fly` from the path setting pipeline to target `some-concourse` for remote repository `git@github.com:some-user/geode-native.git`. -Pipeline names will be `some-user-wip-something` and `some-user-wip-something-pr`. + +Executes `fly` from the path setting pipeline to target `some-concourse` for remote +repository `git@github.com:some-user/geode-native.git`. Pipeline names will be `some-user-wip-something` +and `some-user-wip-something-pr`. ### Alternative repository URL and fly version + Sometimes you will have to support multiple versions of Concourse `fly` or need to fetch sources via https. + ```console $ ./set-pipeline.sh \ --fly=/path/to/fly \ --target=some-concourse \ --repository=https://github.com/some-user/geode-native.git ``` -Executes fly at `/path/to/fly` setting pipeline to target `some-concourse` for remote repository `https://github.com/some-user/geode-native.git`. -Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr`. 
+ +Executes fly at `/path/to/fly` setting pipeline to target `some-concourse` for remote +repository `https://github.com/some-user/geode-native.git`. Pipeline names will be `some-user-wip-something` +and `some-user-wip-something-pr`. # Pipeline Steps + ## Release + * Detects new version or source * Build for each platform and configuration * Creates VM instances @@ -58,7 +74,9 @@ Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr * Uploads artifacts to GCS * Publishes to GitHub release (TODO) * Detects changes to pipeline sources and auto updates + ## Pull Release (PR) + * Detects new PR * Build for each platform and configuration * Creates VM instances @@ -71,8 +89,11 @@ Pipelines name will be `some-user-wip-something` and `some-user-wip-something-pr * Detects changes to pipeline sources and auto updates # Details + This Concourse pipeline YAML is rendered using `ytt`. Depends on output from `git` and `gcloud`. + ## Dependencies + * [Google Cloud](https://console.cloud.google.com) * [Concourse](https://concourse-ci.org) v6.5.0+ * [`ytt`](https://get-ytt.io) v0.28.0+ @@ -80,18 +101,22 @@ This Concourse pipeline YAML is rendered using `gi ## Layout + * base - Defines all common tasks across both pipelines. * release - Defines tasks tasks for release pipeline only. * pr - Defines tasks for pr pipeline only. * lib - ytt functions used by all templates. * docker/task - Minimal image required to communicate with builders. -#TODO +# TODO + ## Concourse Installation + * Resolve chicken/egg problem with external API address.
- * `helm install concourse concourse/concourse` - * `helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://1.2.3.4:8080` + * `helm install concourse concourse/concourse` + * `helm upgrade concourse concourse/concourse --set web.service.api.type=LoadBalancer,concourse.web.externalUrl=http://1.2.3.4:8080` * Task for getting secrets into k8s. - * `kubectl create secret generic gcr-json-key --from-literal "value=$(cat XXX.json)" --namespace=concourse-main` - * `kubectl create secret generic github-access-token --from-literal "value=XXX" --namespace=concourse-main` + * `kubectl create secret generic gcr-json-key --from-literal "value=$(cat XXX.json)" --namespace=concourse-main` + * `kubectl create secret generic github-access-token --from-literal "value=XXX" --namespace=concourse-main` + * `kubectl create secret generic github-private-key --from-literal "value=$(cat XXX)" --namespace=concourse-main` * Use docker locally for initial pipeline deployment to avoid `gcloud`, `ytt`, and `fly` version issues. From 53d953401c1e5dcd93ee4e7b44b4029cbcfda9f2 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 22 Dec 2020 16:12:46 -0800 Subject: [PATCH 121/155] Use tar to speed up file copies. 
--- ci/lib/templates.lib.txt | 4 ++++ ci/lib/templates.lib.yml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index 5b2002b140..63eec986a7 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -59,6 +59,10 @@ function remote_download { scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "${INSTANCE_USER}@${external_ip}:${1}" "$2" } +function remote_download_directory { + ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} tar -C $(dirname ${1}) -czf - $(basename ${1}) | tar -C ${2} -zxvf - +} + function remote_upload { scp ${SSH_OPTIONS} -i ${ssh_key_file} -q -r "$1" "${INSTANCE_USER}@${external_ip}:${2}" } diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index 61ea4e8605..a47b8145aa 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -328,7 +328,7 @@ config: set -ueo pipefail (@= remote_functions() @) - remote_download build . + remote_download_directory build . #@ end --- From 7500f176a01cb49948d720c3486f9a9595873e81 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 22 Dec 2020 16:13:12 -0800 Subject: [PATCH 122/155] Kill all user processes before running tests. 
--- ci/lib/templates.lib.txt | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index 63eec986a7..5a27ce8d71 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -70,35 +70,34 @@ function remote_upload { (@- end @) (@ def run_cpp_unit_tests(): -@) -remote_shell taskkill /t /f /im ctest.exe /im apache-geode_unittests.exe || true -remote_shell 'pkill ^ctest$; pkill ^apache-geode_unittests$' || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true +remote_shell pkill -u ${INSTANCE_USER} || true remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed (@- end @) (@ def run_cpp_integration_tests(): -@) -remote_shell taskkill /t /f /im ctest.exe /im cpp-integration-test.exe /im java.exe || true -remote_shell 'pkill ^ctest$; pkill ^cpp-integration-test$; pkill ^java$' || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true +remote_shell pkill -u ${INSTANCE_USER} || true remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test (@- end @) (@ def run_cpp_legacy_integration_tests(): -@) - -remote_shell taskkill /t /f /im ctest.exe /im test* /im java.exe || true -remote_shell 'pkill ^ctest$; pkill ^test; pkill ^java$' || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true +remote_shell pkill -u ${INSTANCE_USER} || true remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed (@- end @) (@ def run_net_unit_tests(): -@) -remote_shell taskkill /t /f /im xunit.console.exe || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true remote_shell cmake -E chdir build/clicache/test2 
../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.Tests2.dll -parallel all (@- end @) (@ def run_net_integration_tests(): -@) -remote_shell taskkill /t /f /im xunit.console.exe /im java.exe || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true remote_shell cmake -E chdir build/clicache/integration-test2 ../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.IntegrationTests2.dll -verbose -maxthreads 6 (@- end @) (@ def run_net_legacy_integration_tests(): -@) -remote_shell taskkill /t /f /im ctest.exe /im nunit-console.exe /im nunit-agent.exe /im fwkclient.exe /im java.exe || true +remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=1000 --output-on-failure --rerun-failed (@- end @) From 26d9002ae610488331071db88585b3a2acb45c7d Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 23 Dec 2020 18:21:52 -0800 Subject: [PATCH 123/155] Framework Gfsh gains JVM heap settings. 
--- .../integration/framework/GfshExecute.cpp | 43 +++++++++++++------ cppcache/integration/framework/GfshExecute.h | 3 ++ 2 files changed, 32 insertions(+), 14 deletions(-) diff --git a/cppcache/integration/framework/GfshExecute.cpp b/cppcache/integration/framework/GfshExecute.cpp index 4e83416ca4..a6f1686232 100644 --- a/cppcache/integration/framework/GfshExecute.cpp +++ b/cppcache/integration/framework/GfshExecute.cpp @@ -44,7 +44,8 @@ std::string GfshExecuteException::getName() const { int GfshExecuteException::getGfshReturnCode() { return returnCode_; } void GfshExecute::execute(const std::string &command, const std::string &user, - const std::string &password, const std::string &keyStorePath, + const std::string &password, + const std::string &keyStorePath, const std::string &trustStorePath, const std::string &keyStorePassword, const std::string &trustStorePassword) { @@ -58,14 +59,16 @@ void GfshExecute::execute(const std::string &command, const std::string &user, commands.push_back("-e"); commands.push_back(command); - auto env = boost::this_process::environment(); - environment _env = env; - // broken on windows env["JAVA_ARGS"] = "-Xmx1g -client"; + environment env{boost::this_process::environment()}; + + if (!maxHeap_.empty()) { + env["JAVA_ARGS"] = "-Xmx" + maxHeap_ + " " + env["JAVA_ARGS"].to_string(); + } ipstream outStream; ipstream errStream; - auto gfsh = executeChild(commands, _env, outStream, errStream); + auto gfsh = executeChild(commands, env, outStream, errStream); std::string line; @@ -85,7 +88,9 @@ void GfshExecute::execute(const std::string &command, const std::string &user, if (exit_code) { throw GfshExecuteException("gfsh error", exit_code); } - extractConnectionCommand(command, user, password, keyStorePath, trustStorePath, keyStorePassword, trustStorePassword); + extractConnectionCommand(command, user, password, keyStorePath, + trustStorePath, keyStorePassword, + trustStorePassword); } child GfshExecute::executeChild(std::vector 
&commands, @@ -99,10 +104,11 @@ child GfshExecute::executeChild(std::vector &commands, args = commands, env, std_out > outStream, std_err > errStream); } -void GfshExecute::extractConnectionCommand(const std::string &command, const std::string &user, - const std::string &password, const std::string &keyStorePath, - const std::string &trustStorePath, const std::string &keyStorePassword, - const std::string &trustStorePassword) { +void GfshExecute::extractConnectionCommand( + const std::string &command, const std::string &user, + const std::string &password, const std::string &keyStorePath, + const std::string &trustStorePath, const std::string &keyStorePassword, + const std::string &trustStorePassword) { if (starts_with(command, std::string("connect"))) { connection_ = command; } else if (starts_with(command, std::string("start locator"))) { @@ -121,15 +127,24 @@ void GfshExecute::extractConnectionCommand(const std::string &command, const std jmxManagerPort = jmxManagerPortMatch[1]; } - connection_ = "connect --jmx-manager=" + jmxManagerHost + "[" + jmxManagerPort + "]"; + connection_ = + "connect --jmx-manager=" + jmxManagerHost + "[" + jmxManagerPort + "]"; if (!(user.empty() || password.empty())) { connection_ += " --user=" + user + " --password=" + password; } - if(!(keyStorePath.empty() || trustStorePath.empty() || keyStorePassword.empty() || trustStorePassword.empty())) { - connection_ += " --use-ssl=true --key-store=" + keyStorePath + " --trust-store=" + trustStorePath + - " --key-store-password=" + keyStorePassword + " --trust-store-password=" + trustStorePassword; + if (!(keyStorePath.empty() || trustStorePath.empty() || + keyStorePassword.empty() || trustStorePassword.empty())) { + connection_ += " --use-ssl=true --key-store=" + keyStorePath + + " --trust-store=" + trustStorePath + + " --key-store-password=" + keyStorePassword + + " --trust-store-password=" + trustStorePassword; } } } + +GfshExecute &GfshExecute::withMaxHeap(std::string maxHeap) { + 
maxHeap_ = std::move(maxHeap); + return *this; +} diff --git a/cppcache/integration/framework/GfshExecute.h b/cppcache/integration/framework/GfshExecute.h index 897e77a5ec..fb70617e3a 100644 --- a/cppcache/integration/framework/GfshExecute.h +++ b/cppcache/integration/framework/GfshExecute.h @@ -57,6 +57,7 @@ class GfshExecuteException : public apache::geode::client::Exception { class GfshExecute : public Gfsh { std::string connection_; + std::string maxHeap_ = "256m"; void execute(const std::string &command, const std::string &user, const std::string &password, const std::string &keyStorePath, @@ -80,6 +81,8 @@ class GfshExecute : public Gfsh { public: GfshExecute() = default; virtual ~GfshExecute() override = default; + + GfshExecute &withMaxHeap(std::string maxHeap); }; #endif // INTEGRATION_TEST_FRAMEWORK_GFSHEXECUTE_H From 5016b3f198642080c88fed106f74093b8c8bcd6c Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 23 Dec 2020 19:24:18 -0800 Subject: [PATCH 124/155] Legacy integration tests use framework Gfsh. Fixes testThinClientPutWithDelta. 
--- cppcache/integration-test/CMakeLists.txt | 2 +- cppcache/integration-test/CacheHelper.cpp | 267 +++++----------------- cppcache/integration-test/CacheHelper.hpp | 8 +- cppcache/integration/framework/Gfsh.cpp | 6 + cppcache/integration/framework/Gfsh.h | 3 + 5 files changed, 74 insertions(+), 212 deletions(-) diff --git a/cppcache/integration-test/CMakeLists.txt b/cppcache/integration-test/CMakeLists.txt index 7a40f756c1..3594df6610 100644 --- a/cppcache/integration-test/CMakeLists.txt +++ b/cppcache/integration-test/CMakeLists.txt @@ -24,6 +24,7 @@ add_library(test-cppcache-utils STATIC ) target_link_libraries(test-cppcache-utils PRIVATE + integration-framework ACE::ACE _WarningsAsError PUBLIC @@ -246,7 +247,6 @@ set_tests_properties(testFwPerf testThinClientPRSingleHop testThinClientPoolAttrTest testThinClientPoolLocator - testThinClientPutWithDelta testThinClientRemoteQueryTimeout testThinClientRemoveOps testThinClientSecurityAuthentication diff --git a/cppcache/integration-test/CacheHelper.cpp b/cppcache/integration-test/CacheHelper.cpp index 9556337e77..9a2718eb03 100644 --- a/cppcache/integration-test/CacheHelper.cpp +++ b/cppcache/integration-test/CacheHelper.cpp @@ -15,7 +15,6 @@ * limitations under the License. 
*/ -#include #include #include #include @@ -53,16 +52,16 @@ #endif #if defined(WIN32) -#define GFSH "gfsh.bat" #define COPY_COMMAND "copy /y" #define DELETE_COMMAND "del /f" #define PATH_SEP "\\" #else -#define GFSH "gfsh" #define DELETE_COMMAND "rm -f" #define PATH_SEP "/" #endif +#include "framework/GfshExecute.h" + extern ClientCleanup gClientCleanup; namespace apache { @@ -280,11 +279,6 @@ CacheHelper::~CacheHelper() { disconnect(); } -void CacheHelper::closePool(const char *poolName, bool keepAlive) { - auto pool = getCache()->getPoolManager().find(poolName); - pool->destroy(keepAlive); -} - void CacheHelper::disconnect(bool keepalive) { if (cachePtr == nullptr) { return; @@ -353,11 +347,6 @@ void CacheHelper::createLRURegion(const char *regionName, ASSERT(regionPtr != nullptr, "failed to create region."); } -void CacheHelper::createDistRegion(const char *regionName, - std::shared_ptr ®ionPtr) { - createDistRegion(regionName, regionPtr, 10); -} - void CacheHelper::createDistRegion(const char *regionName, std::shared_ptr ®ionPtr, uint32_t size) { @@ -976,92 +965,6 @@ const char *CacheHelper::getLocatorHostPort(int locPort) { sprintf(tmp, "%d", locPort); gfendpoints += tmp; return (new std::string(gfendpoints.c_str()))->c_str(); - ; -} - -const std::string CacheHelper::getTcrEndpoints2(bool &isLocalServer, - int numberOfServers) { - static char *gfjavaenv = ACE_OS::getenv("GFJAVA"); - std::string gfendpoints; - static bool gflocalserver = false; - char tmp[128]; - - if (gfendpoints.empty()) { - if ((ACE_OS::strchr(gfjavaenv, '\\') != nullptr) || - (ACE_OS::strchr(gfjavaenv, '/') != nullptr)) { - gflocalserver = true; - /* Support for multiple servers Max = 10*/ - switch (numberOfServers) { - case 1: - // gfendpoints = "localhost:24680"; - { - gfendpoints = "localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort1); - gfendpoints += tmp; - } - break; - case 2: - // gfendpoints = "localhost:24680,localhost:24681"; - { - gfendpoints = "localhost:"; - 
sprintf(tmp, "%d", CacheHelper::staticHostPort1); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort2); - gfendpoints += tmp; - } - break; - case 3: - // gfendpoints = "localhost:24680,localhost:24681,localhost:24682"; - { - gfendpoints = "localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort1); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort2); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort3); - gfendpoints += tmp; - } - break; - case 4: - // gfendpoints = - // "localhost:24680,localhost:24681,localhost:24682,localhost:24683"; - { - gfendpoints = "localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort1); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort2); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort3); - gfendpoints += tmp; - gfendpoints += ",localhost:"; - sprintf(tmp, "%d", CacheHelper::staticHostPort4); - gfendpoints += tmp; - } - break; - default: - ASSERT((numberOfServers <= 10), - " More than 10 servers not supported"); - gfendpoints = "localhost:24680"; - char temp[8]; - for (int i = 1; i <= numberOfServers - 1; i++) { - gfendpoints += ",localhost:2468"; - gfendpoints += ACE_OS::itoa(i, temp, 10); - } - break; - } - } else { - gfendpoints = gfjavaenv; - } - } - ASSERT(gfjavaenv != nullptr, - "Environment variable GFJAVA for java build directory is not set."); - isLocalServer = gflocalserver; - return (gfendpoints); } const char *CacheHelper::getLocatorHostPort(bool &isLocator, @@ -1137,7 +1040,7 @@ void CacheHelper::cleanupServerInstances() { } } void CacheHelper::initServer(int instance, const char *xml, - const char *locHostport, const char *authParam, + const char *locHostport, const char * /*unused*/, bool ssl, bool enableDelta, bool multiDS, bool testServerGC, bool 
untrustedCert, bool useSecurityManager) { @@ -1147,12 +1050,7 @@ void CacheHelper::initServer(int instance, const char *xml, printf("TimeBomb registered server cleanupcallback \n"); } printf("Inside initServer added\n"); - if (authParam != nullptr) { - printf("Inside initServer with authParam = %s\n", authParam); - } else { - printf("Inside initServer with authParam as nullptr\n"); - authParam = ""; - } + static const char *gfjavaenv = ACE_OS::getenv("GFJAVA"); static const char *gfLogLevel = ACE_OS::getenv("GFE_LOGLEVEL"); static const char *gfSecLogLevel = ACE_OS::getenv("GFE_SECLOGLEVEL"); @@ -1161,7 +1059,6 @@ void CacheHelper::initServer(int instance, const char *xml, static const char *mcastAddr = ACE_OS::getenv("MCAST_ADDR"); static char *classpath = ACE_OS::getenv("GF_CLASSPATH"); - char cmd[2048]; char tmp[128]; char currWDPath[2048]; int portNum = 0; @@ -1272,15 +1169,6 @@ void CacheHelper::initServer(int instance, const char *xml, printf(" creating dir = %s \n", sname.c_str()); ACE_OS::mkdir(sname.c_str()); - sprintf(cmd, "%s/bin/%s stop server --dir=%s 2>&1", gfjavaenv, GFSH, - currDir.c_str()); - - LOG(cmd); - ACE_OS::system(cmd); - std::string deltaProperty = ""; - if (!enableDelta) { - deltaProperty = "delta-propagation=false"; - } int64_t defaultTombstone_timeout = 600000; int64_t defaultTombstone_gc_threshold = 100000; int64_t userTombstone_timeout = 1000; @@ -1292,50 +1180,41 @@ void CacheHelper::initServer(int instance, const char *xml, ACE_OS::mkdir("backupDirectory4"); } - if (locHostport != nullptr) { // check number of locator host port. 
- std::string geodeProperties = generateGeodeProperties( - currDir, ssl, -1, 0, untrustedCert, useSecurityManager); - - sprintf( - cmd, - "%s/bin/%s start server --classpath=%s --name=%s " - "--cache-xml-file=%s %s --dir=%s --server-port=%d --log-level=%s " - "--properties-file=%s %s %s " - "--J=-Dgemfire.tombstone-timeout=%" PRId64 - " " - "--J=-Dgemfire.tombstone-gc-hreshold=%" PRId64 - " " - "--J=-Dgemfire.security-log-level=%s --J=-Xmx1024m --J=-Xms128m 2>&1", - gfjavaenv, GFSH, classpath, sname.c_str(), xmlFile.c_str(), - useSecurityManager ? "--user=root --password=root-password" : "", - currDir.c_str(), portNum, gfLogLevel, geodeProperties.c_str(), - authParam, deltaProperty.c_str(), - testServerGC ? userTombstone_timeout : defaultTombstone_timeout, - testServerGC ? userTombstone_gc_threshold - : defaultTombstone_gc_threshold, - gfSecLogLevel); - } else { - sprintf( - cmd, - "%s/bin/%s start server --classpath=%s --name=%s " - "--cache-xml-file=%s %s --dir=%s --server-port=%d --log-level=%s %s %s " - "--J=-Dgemfire.tombstone-timeout=%" PRId64 - " " - "--J=-Dgemfire.tombstone-gc-hreshold=%" PRId64 - " " - "--J=-Dgemfire.security-log-level=%s --J=-Xmx1024m --J=-Xms128m 2>&1", - gfjavaenv, GFSH, classpath, sname.c_str(), xmlFile.c_str(), - useSecurityManager ? "--user=root --password=root-password" : "", - currDir.c_str(), portNum, gfLogLevel, authParam, deltaProperty.c_str(), - testServerGC ? userTombstone_timeout : defaultTombstone_timeout, - testServerGC ? userTombstone_gc_threshold - : defaultTombstone_gc_threshold, - gfSecLogLevel); + auto gfsh = + GfshExecute() + .start() + .server() + .withClasspath(classpath) + .withName(sname) + .withCacheXMLFile(xmlFile) + .withDir(currDir) + .withPort(portNum) + .withLogLevel(gfLogLevel) + .withMaxHeap("1g") + .withSystemProperty( + "gemfire.tombstone-timeout", + std::to_string(testServerGC ? 
userTombstone_timeout + : defaultTombstone_timeout)) + .withSystemProperty( + "gemfire.tombstone-gc-hreshold", + std::to_string(testServerGC ? userTombstone_gc_threshold + : defaultTombstone_gc_threshold)) + .withSystemProperty("gemfire.security-log-level", gfSecLogLevel); + + if (useSecurityManager) { + gfsh.withUser("root").withPassword("root-password"); } - LOG(cmd); - int e = ACE_OS::system(cmd); - ASSERT(0 == e, "cmd failed"); + if (locHostport != nullptr) { + gfsh.withPropertiesFile(generateGeodeProperties( + currDir, ssl, -1, 0, untrustedCert, useSecurityManager)); + } + + if (!enableDelta) { + gfsh.withSystemProperty("gemfire.delta-propagation", "false"); + } + + gfsh.execute(); staticServerInstanceList.push_back(instance); printf("added server instance %d\n", instance); @@ -1457,7 +1336,6 @@ void CacheHelper::createDuplicateXMLFile(std::string &duplicateFile, void CacheHelper::closeServer(int instance) { static char *gfjavaenv = ACE_OS::getenv("GFJAVA"); - char cmd[2048]; char tmp[128]; char currWDPath[2048]; @@ -1499,11 +1377,10 @@ void CacheHelper::closeServer(int instance) { break; } - sprintf(cmd, "%s/bin/%s stop server --dir=%s 2>&1", gfjavaenv, GFSH, - currDir.c_str()); - - LOG(cmd); - ACE_OS::system(cmd); + try { + GfshExecute().stop().server().withDir(currDir).execute(); + } catch (const GfshExecuteException &) { + } terminate_process_file(currDir + "/vf.gf.server.pid", std::chrono::seconds(10)); @@ -1555,10 +1432,10 @@ void CacheHelper::closeLocator(int instance, bool) { break; } - sprintf(cmd, "%s/bin/%s stop locator --dir=%s", gfjavaenv, GFSH, - currDir.c_str()); - LOG(cmd); - ACE_OS::system(cmd); + try { + GfshExecute().stop().locator().withDir(currDir).execute(); + } catch (const GfshExecuteException &) { + } terminate_process_file(currDir + "/vf.gf.locator.pid", std::chrono::seconds(10)); @@ -1654,11 +1531,8 @@ void CacheHelper::initLocator(int instance, bool ssl, bool, int dsId, } static char *gfjavaenv = ACE_OS::getenv("GFJAVA"); - char 
cmd[2048]; char currWDPath[2048]; std::string currDir = ACE_OS::getcwd(currWDPath, 2048); - // std::string keystore = std::string(ACE_OS::getenv("TESTSRC")) + - // "/keystore"; ASSERT(gfjavaenv != nullptr, "Environment variable GFJAVA for java build directory is not set."); @@ -1675,25 +1549,19 @@ void CacheHelper::initLocator(int instance, bool ssl, bool, int dsId, char tmp[100]; switch (instance) { case 1: - // portnum = 34756; portnum = CacheHelper::staticLocatorHostPort1; sprintf(tmp, "%d", CacheHelper::staticLocatorHostPort1); locDirname += tmp; - // locDirname += "1"; break; case 2: - // portnum = 34757; portnum = CacheHelper::staticLocatorHostPort2; sprintf(tmp, "%d", CacheHelper::staticLocatorHostPort2); locDirname += tmp; - // locDirname += "2"; break; default: - // portnum = 34758; portnum = CacheHelper::staticLocatorHostPort3; sprintf(tmp, "%d", CacheHelper::staticLocatorHostPort3); locDirname += tmp; - // locDirname += "3"; break; } @@ -1706,38 +1574,27 @@ void CacheHelper::initLocator(int instance, bool ssl, bool, int dsId, std::string geodeFile = generateGeodeProperties( currDir, ssl, dsId, remoteLocator, untrustedCert, useSecurityManager); - sprintf(cmd, "%s/bin/%s stop locator --dir=%s --properties-file=%s ", - gfjavaenv, GFSH, currDir.c_str(), geodeFile.c_str()); - - LOG(cmd); - ACE_OS::system(cmd); - - static char *classpath = ACE_OS::getenv("GF_CLASSPATH"); - std::string propertiesFile = - useSecurityManager - ? 
std::string("--security-properties-file=") + geodeFile - : std::string("--properties-file=") + geodeFile; - sprintf(cmd, - "%s/bin/%s start locator --name=%s --port=%d --dir=%s " - "%s --http-service-port=0 --classpath=%s " - "--J=-Dgemfire.jmx-manager-port=%d", - gfjavaenv, GFSH, locDirname.c_str(), portnum, currDir.c_str(), - propertiesFile.c_str(), classpath, jmxManagerPort); + std::string classpath = ACE_OS::getenv("GF_CLASSPATH"); + + auto gfsh = GfshExecute() + .start() + .locator() + .withName(locDirname) + .withPort(portnum) + .withDir(currDir) + .withClasspath(classpath) + .withHttpServicePort(0) + .withJmxManagerPort(jmxManagerPort); + if (useSecurityManager) { + gfsh.withSecurityPropertiesFile(geodeFile); + } else { + gfsh.withPropertiesFile(geodeFile); + } + gfsh.execute(); - LOG(cmd); - ACE_OS::system(cmd); staticLocatorInstanceList.push_back(instance); } -void CacheHelper::clearSecProp() { - auto tmpSecProp = CacheHelper::getHelper() - .getCache() - ->getSystemProperties() - .getSecurityProperties(); - tmpSecProp->remove("security-username"); - tmpSecProp->remove("security-password"); -} - void CacheHelper::setJavaConnectionPoolSize(uint32_t size) { CacheHelper::getHelper() .getCache() diff --git a/cppcache/integration-test/CacheHelper.hpp b/cppcache/integration-test/CacheHelper.hpp index 06b0da8df2..553f84c28e 100644 --- a/cppcache/integration-test/CacheHelper.hpp +++ b/cppcache/integration-test/CacheHelper.hpp @@ -101,8 +101,6 @@ class CacheHelper { virtual ~CacheHelper(); - void closePool(const char* poolName, bool keepAlive = false); - void disconnect(bool keepalive = false); void createPlainRegion(const char* regionName, @@ -284,7 +282,7 @@ class CacheHelper { static int staticHostPort4; static const std::string getTcrEndpoints(bool& isLocalServer, - int numberOfServers = 1); + int numberOfServers = 1); static int staticLocatorHostPort1; static int staticLocatorHostPort2; @@ -302,7 +300,7 @@ class CacheHelper { int numberOfLocators = 0); static 
const std::string getTcrEndpoints2(bool& isLocalServer, - int numberOfServers = 1); + int numberOfServers = 1); static std::list staticServerInstanceList; static bool isServerCleanupCallbackRegistered; @@ -355,8 +353,6 @@ class CacheHelper { bool untrustedCert = false, bool useSecurityManager = false); - static void clearSecProp(); - static void setJavaConnectionPoolSize(uint32_t size); static bool isSeedSet; diff --git a/cppcache/integration/framework/Gfsh.cpp b/cppcache/integration/framework/Gfsh.cpp index 17ad57642f..7bb5426c98 100644 --- a/cppcache/integration/framework/Gfsh.cpp +++ b/cppcache/integration/framework/Gfsh.cpp @@ -353,6 +353,12 @@ Gfsh::Start::Server &Gfsh::Start::Server::withHostNameForClients( return *this; } +Gfsh::Start::Server &Gfsh::Start::Server::withSystemProperty( + const std::string &key, const std::string &value) { + command_ += " --J=-D" + key + "=" + value; + return *this; +} + Gfsh::Stop::Stop(Gfsh &gfsh) : gfsh_(gfsh) {} Gfsh::Stop::Server Gfsh::Stop::server() { return Server{gfsh_}; } diff --git a/cppcache/integration/framework/Gfsh.h b/cppcache/integration/framework/Gfsh.h index 1170fe6a4c..9f046d6a14 100644 --- a/cppcache/integration/framework/Gfsh.h +++ b/cppcache/integration/framework/Gfsh.h @@ -192,6 +192,9 @@ class Gfsh { Server &withSecurityPropertiesFile(const std::string file); Server &withHostNameForClients(const std::string hostName); + + Server &withSystemProperty(const std::string &key, + const std::string &value); }; private: From 695812005f239597472d78b772b2f484fd2253df Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 24 Dec 2020 11:25:47 -0800 Subject: [PATCH 125/155] Remove unused method. 
--- cppcache/integration/framework/Gfsh.cpp | 6 ------ cppcache/integration/framework/Gfsh.h | 2 -- 2 files changed, 8 deletions(-) diff --git a/cppcache/integration/framework/Gfsh.cpp b/cppcache/integration/framework/Gfsh.cpp index 7bb5426c98..54d99e61a9 100644 --- a/cppcache/integration/framework/Gfsh.cpp +++ b/cppcache/integration/framework/Gfsh.cpp @@ -130,12 +130,6 @@ Gfsh::Start::Locator &Gfsh::Start::Locator::withSecurityManager( return *this; } -Gfsh::Start::Locator &Gfsh::Start::Locator::withConnect( - const std::string connect) { - command_ += " --connect=" + connect; - return *this; -} - Gfsh::Start::Locator &Gfsh::Start::Locator::withPreferIPv6(bool useIPv6) { if (useIPv6) { command_ += " --J=-Djava.net.preferIPv6Addresses=true"; diff --git a/cppcache/integration/framework/Gfsh.h b/cppcache/integration/framework/Gfsh.h index 9f046d6a14..4b834fbc9d 100644 --- a/cppcache/integration/framework/Gfsh.h +++ b/cppcache/integration/framework/Gfsh.h @@ -118,8 +118,6 @@ class Gfsh { Locator &withSecurityManager(const std::string securityManager); - Locator &withConnect(const std::string connect); - Locator &withPreferIPv6(bool useIPv6); Locator &withSslEnabledComponents(const std::string &components); From 15b8e7bdffa9b15ca4f50e300e31905f4a8896ff Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 24 Dec 2020 11:26:40 -0800 Subject: [PATCH 126/155] Fixes gfsh scope. 
--- cppcache/integration-test/CMakeLists.txt | 2 +- cppcache/integration-test/CacheHelper.cpp | 45 +++++++++++------------ cppcache/integration-test/CacheHelper.hpp | 3 ++ 3 files changed, 26 insertions(+), 24 deletions(-) diff --git a/cppcache/integration-test/CMakeLists.txt b/cppcache/integration-test/CMakeLists.txt index 3594df6610..0a0a450d8a 100644 --- a/cppcache/integration-test/CMakeLists.txt +++ b/cppcache/integration-test/CMakeLists.txt @@ -24,12 +24,12 @@ add_library(test-cppcache-utils STATIC ) target_link_libraries(test-cppcache-utils PRIVATE - integration-framework ACE::ACE _WarningsAsError PUBLIC apache-geode framework + integration-framework ) set_target_properties(test-cppcache-utils PROPERTIES CXX_VISIBILITY_PRESET hidden diff --git a/cppcache/integration-test/CacheHelper.cpp b/cppcache/integration-test/CacheHelper.cpp index 9a2718eb03..7702733522 100644 --- a/cppcache/integration-test/CacheHelper.cpp +++ b/cppcache/integration-test/CacheHelper.cpp @@ -60,14 +60,14 @@ #define PATH_SEP "/" #endif -#include "framework/GfshExecute.h" - extern ClientCleanup gClientCleanup; namespace apache { namespace geode { namespace client { +GfshExecute CacheHelper::gfsh; + #define RANDOM_NUMBER_OFFSET 14000 #define RANDOM_NUMBER_DIVIDER 15000 std::shared_ptr CacheHelper::getCache() { return cachePtr; } @@ -1180,9 +1180,8 @@ void CacheHelper::initServer(int instance, const char *xml, ACE_OS::mkdir("backupDirectory4"); } - auto gfsh = - GfshExecute() - .start() + auto server = + gfsh.start() .server() .withClasspath(classpath) .withName(sname) @@ -1202,19 +1201,19 @@ void CacheHelper::initServer(int instance, const char *xml, .withSystemProperty("gemfire.security-log-level", gfSecLogLevel); if (useSecurityManager) { - gfsh.withUser("root").withPassword("root-password"); + server.withUser("root").withPassword("root-password"); } if (locHostport != nullptr) { - gfsh.withPropertiesFile(generateGeodeProperties( + server.withPropertiesFile(generateGeodeProperties( 
currDir, ssl, -1, 0, untrustedCert, useSecurityManager)); } if (!enableDelta) { - gfsh.withSystemProperty("gemfire.delta-propagation", "false"); + server.withSystemProperty("gemfire.delta-propagation", "false"); } - gfsh.execute(); + server.execute(); staticServerInstanceList.push_back(instance); printf("added server instance %d\n", instance); @@ -1378,7 +1377,7 @@ void CacheHelper::closeServer(int instance) { } try { - GfshExecute().stop().server().withDir(currDir).execute(); + gfsh.stop().server().withDir(currDir).execute(); } catch (const GfshExecuteException &) { } @@ -1433,7 +1432,7 @@ void CacheHelper::closeLocator(int instance, bool) { } try { - GfshExecute().stop().locator().withDir(currDir).execute(); + gfsh.stop().locator().withDir(currDir).execute(); } catch (const GfshExecuteException &) { } @@ -1576,21 +1575,21 @@ void CacheHelper::initLocator(int instance, bool ssl, bool, int dsId, std::string classpath = ACE_OS::getenv("GF_CLASSPATH"); - auto gfsh = GfshExecute() - .start() - .locator() - .withName(locDirname) - .withPort(portnum) - .withDir(currDir) - .withClasspath(classpath) - .withHttpServicePort(0) - .withJmxManagerPort(jmxManagerPort); + auto locator = gfsh.start() + .locator() + .withConnect(false) + .withName(locDirname) + .withPort(portnum) + .withDir(currDir) + .withClasspath(classpath) + .withHttpServicePort(0) + .withJmxManagerPort(jmxManagerPort); if (useSecurityManager) { - gfsh.withSecurityPropertiesFile(geodeFile); + locator.withSecurityPropertiesFile(geodeFile); } else { - gfsh.withPropertiesFile(geodeFile); + locator.withPropertiesFile(geodeFile); } - gfsh.execute(); + locator.execute(); staticLocatorInstanceList.push_back(instance); } diff --git a/cppcache/integration-test/CacheHelper.hpp b/cppcache/integration-test/CacheHelper.hpp index 553f84c28e..3a5ba3ce9b 100644 --- a/cppcache/integration-test/CacheHelper.hpp +++ b/cppcache/integration-test/CacheHelper.hpp @@ -31,6 +31,8 @@ #include #include +#include "framework/GfshExecute.h" 
+ #include "TimeBomb.hpp" #include "DistributedSystemImpl.hpp" #include "Utils.hpp" @@ -52,6 +54,7 @@ class CacheHelper { public: static CacheHelper* singleton; static std::list staticConfigFileList; + static GfshExecute gfsh; std::shared_ptr cachePtr; std::shared_ptr rootRegionPtr; From 016b4bce8a57a89d3291300593b85a9a93a1e11a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 24 Dec 2020 11:42:57 -0800 Subject: [PATCH 127/155] Limit locator heap. --- cppcache/integration-test/CacheHelper.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cppcache/integration-test/CacheHelper.cpp b/cppcache/integration-test/CacheHelper.cpp index 7702733522..288c77fcb8 100644 --- a/cppcache/integration-test/CacheHelper.cpp +++ b/cppcache/integration-test/CacheHelper.cpp @@ -1583,7 +1583,8 @@ void CacheHelper::initLocator(int instance, bool ssl, bool, int dsId, .withDir(currDir) .withClasspath(classpath) .withHttpServicePort(0) - .withJmxManagerPort(jmxManagerPort); + .withJmxManagerPort(jmxManagerPort) + .withMaxHeap("256m"); if (useSecurityManager) { locator.withSecurityPropertiesFile(geodeFile); } else { From d9f8b4f5a7e7521fe0f964c871f4f7cd4052bbc8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 24 Dec 2020 11:59:12 -0800 Subject: [PATCH 128/155] More gfsh scoping changes. 
--- cppcache/integration-test/CMakeLists.txt | 2 +- cppcache/integration-test/CacheHelper.cpp | 9 ++++++--- cppcache/integration-test/CacheHelper.hpp | 3 --- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/cppcache/integration-test/CMakeLists.txt b/cppcache/integration-test/CMakeLists.txt index 0a0a450d8a..3594df6610 100644 --- a/cppcache/integration-test/CMakeLists.txt +++ b/cppcache/integration-test/CMakeLists.txt @@ -24,12 +24,12 @@ add_library(test-cppcache-utils STATIC ) target_link_libraries(test-cppcache-utils PRIVATE + integration-framework ACE::ACE _WarningsAsError PUBLIC apache-geode framework - integration-framework ) set_target_properties(test-cppcache-utils PROPERTIES CXX_VISIBILITY_PRESET hidden diff --git a/cppcache/integration-test/CacheHelper.cpp b/cppcache/integration-test/CacheHelper.cpp index 288c77fcb8..dde64e2972 100644 --- a/cppcache/integration-test/CacheHelper.cpp +++ b/cppcache/integration-test/CacheHelper.cpp @@ -29,6 +29,8 @@ #include #include +#include "framework/GfshExecute.h" + #include "CacheRegionHelper.hpp" #include "DistributedSystemImpl.hpp" #include "TimeBomb.hpp" @@ -66,8 +68,6 @@ namespace apache { namespace geode { namespace client { -GfshExecute CacheHelper::gfsh; - #define RANDOM_NUMBER_OFFSET 14000 #define RANDOM_NUMBER_DIVIDER 15000 std::shared_ptr CacheHelper::getCache() { return cachePtr; } @@ -1180,6 +1180,7 @@ void CacheHelper::initServer(int instance, const char *xml, ACE_OS::mkdir("backupDirectory4"); } + GfshExecute gfsh; auto server = gfsh.start() .server() @@ -1377,6 +1378,7 @@ void CacheHelper::closeServer(int instance) { } try { + GfshExecute gfsh; gfsh.stop().server().withDir(currDir).execute(); } catch (const GfshExecuteException &) { } @@ -1432,6 +1434,7 @@ void CacheHelper::closeLocator(int instance, bool) { } try { + GfshExecute gfsh; gfsh.stop().locator().withDir(currDir).execute(); } catch (const GfshExecuteException &) { } @@ -1575,9 +1578,9 @@ void CacheHelper::initLocator(int instance, 
bool ssl, bool, int dsId, std::string classpath = ACE_OS::getenv("GF_CLASSPATH"); + GfshExecute gfsh; auto locator = gfsh.start() .locator() - .withConnect(false) .withName(locDirname) .withPort(portnum) .withDir(currDir) diff --git a/cppcache/integration-test/CacheHelper.hpp b/cppcache/integration-test/CacheHelper.hpp index 3a5ba3ce9b..553f84c28e 100644 --- a/cppcache/integration-test/CacheHelper.hpp +++ b/cppcache/integration-test/CacheHelper.hpp @@ -31,8 +31,6 @@ #include #include -#include "framework/GfshExecute.h" - #include "TimeBomb.hpp" #include "DistributedSystemImpl.hpp" #include "Utils.hpp" @@ -54,7 +52,6 @@ class CacheHelper { public: static CacheHelper* singleton; static std::list staticConfigFileList; - static GfshExecute gfsh; std::shared_ptr cachePtr; std::shared_ptr rootRegionPtr; From a452000e93699a4e99f2520d179f669350a387e2 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sun, 27 Dec 2020 20:07:31 +0000 Subject: [PATCH 129/155] improve execution of gfsh --- clicache/integration-test/CacheHelperN.cs | 192 ++++++++-------------- tests/cli/DUnitFramework/Util.cs | 15 +- 2 files changed, 79 insertions(+), 128 deletions(-) diff --git a/clicache/integration-test/CacheHelperN.cs b/clicache/integration-test/CacheHelperN.cs index 44e6e5176c..03bf6bafbc 100644 --- a/clicache/integration-test/CacheHelperN.cs +++ b/clicache/integration-test/CacheHelperN.cs @@ -31,6 +31,7 @@ namespace Apache.Geode.Client.UnitTests using Apache.Geode.DUnitFramework; using Apache.Geode.Client; using System.Management; + using System.Threading; public class PropsStringToObject { @@ -313,13 +314,12 @@ public static string TestDir private const string DefaultDSName = "dstest"; private const string DefaultCacheName = "cachetest"; - private const string JavaServerName = "gfsh.bat"; - private const string GeodeName = "gfsh.bat"; + private const string gfsh = "gfsh.bat"; + private static Dictionary environment = new Dictionary() { { "JAVA_ARGS", "-Xmx256m" } }; private static int 
JavaMcastPort = -1; - private const string JavaServerStartArgs = - "start server --J=-Xmx512m --J=-Xms128m --J=-XX:+UseConcMarkSweepGC --J=-XX:+UseParNewGC --J=-Xss256k --cache-xml-file="; + private const string JavaServerStartArgs = "start server --max-heap=512m --cache-xml-file="; private const string JavaServerStopArgs = "stop server"; - private const string LocatorStartArgs = "start locator"; + private const string LocatorStartArgs = "start locator --max-heap=512m"; private const string LocatorStopArgs = "stop locator"; private const int MaxWaitMillis = 60000; private static char PathSep = Path.DirectorySeparatorChar; @@ -1817,8 +1817,6 @@ public static void StartJavaLocator(int locatorNum, string startDir, { if (m_localServer) { - Process javaProc; - string locatorPath = m_gfeDir + PathSep + "bin" + PathSep + GeodeName; Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); string serverName = "Locator" + Util.Rand(64687687).ToString(); if (startDir != null) @@ -1877,26 +1875,8 @@ public static void StartJavaLocator(int locatorNum, string startDir, string locatorArgs = LocatorStartArgs + " --name=" + serverName + startDir + extraLocatorArgs + " --http-service-port=0"; - if (!Util.StartProcess(locatorPath, locatorArgs, false, null, true, - false, false, true, out javaProc)) - { - Assert.Fail("Failed to run the locator: {0}.", - locatorPath); - } + Assert.AreEqual(0, ExecuteGfsh(locatorArgs), "Failed to start locator."); - StreamReader outSr = javaProc.StandardOutput; - // Wait for cache server to start - bool started = javaProc.WaitForExit(MaxWaitMillis); - Util.Log("Output from '{0} {1}':{2}{3}", GeodeName, locatorArgs, - Environment.NewLine, outSr.ReadToEnd()); - outSr.Close(); - if (!started) - { - javaProc.Kill(); - } - Assert.IsTrue(started, "Timed out waiting for " + - "Locator to start.{0}Please check the locator logs.", - Environment.NewLine); m_runningLocators[locatorNum] = startDir; if (m_locators == null) { @@ -1933,8 +1913,6 @@ 
public static void StartJavaLocator_MDS(int locatorNum, string startDir, { if (m_localServer) { - Process javaProc; - string locatorPath = m_gfeDir + PathSep + "bin" + PathSep + GeodeName; string serverName = "Locator" + Util.Rand(64687687).ToString(); Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); if (startDir != null) @@ -1978,27 +1956,9 @@ public static void StartJavaLocator_MDS(int locatorNum, string startDir, extraLocatorArgs = locatorPort; } string locatorArgs = LocatorStartArgs + " --name=" + serverName + startDir + extraLocatorArgs + " --http-service-port=0"; + + Assert.AreEqual(0, ExecuteGfsh(locatorArgs), "Failed to start locator MDS."); - if (!Util.StartProcess(locatorPath, locatorArgs, false, null, true, - false, false, true, out javaProc)) - { - Assert.Fail("Failed to run the locator: {0}.", - locatorPath); - } - - StreamReader outSr = javaProc.StandardOutput; - // Wait for cache server to start - bool started = javaProc.WaitForExit(MaxWaitMillis); - Util.Log("Output from '{0} {1}':{2}{3}", GeodeName, locatorArgs, - Environment.NewLine, outSr.ReadToEnd()); - outSr.Close(); - if (!started) - { - javaProc.Kill(); - } - Assert.IsTrue(started, "Timed out waiting for " + - "Locator to start.{0}Please check the locator logs.", - Environment.NewLine); m_runningLocators[locatorNum] = startDir; if (m_locators == null) { @@ -2100,8 +2060,7 @@ public static void StartJavaServer(int serverNum, string startDir, "could not find cache.xml for server number {0}", serverNum); } string cacheXml = m_cacheXmls[serverNum - 1]; - Process javaProc; - string javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + JavaServerName; + Util.Log("XXXX Cache XML: {0}", cacheXml); string serverName = "Server" + Util.Rand(372468723).ToString(); startDir += serverName; int port = 0; @@ -2146,34 +2105,63 @@ public static void StartJavaServer(int serverNum, string startDir, " --server-port=" + port + " --classpath=" + classpath + " --log-level=" + m_gfeLogLevel 
+ startDir + " --J=-Dsecurity-log-level=" + m_gfeSecLogLevel + extraServerArgs; - if (!Util.StartProcess(javaServerPath, serverArgs, false, null, true, - false, false, true, out javaProc)) - { - Assert.Fail("Failed to run the java cacheserver executable: {0}.", - javaServerPath); - } - StreamReader outSr = javaProc.StandardOutput; - // Wait for cache server to start - bool started = javaProc.WaitForExit(MaxWaitMillis); - Util.Log("Output from '{0} {1}':{2}{3}", JavaServerName, serverArgs, - Environment.NewLine, outSr.ReadToEnd()); - outSr.Close(); - if (!started) + Assert.AreEqual(0, ExecuteGfsh(serverArgs), "Failed to start server."); + + m_runningJavaServers[serverNum] = startDir; + } + } + + public static int ExecuteGfsh(string command) + { + Util.Log("ExecuteGfsh: {0}", command); + + string javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + gfsh; + //Process process; + //Util.StartProcess(javaServerPath, command, false, null, true, true, true, true, environment, out process); + using(Process process = new Process()) + { + process.StartInfo.FileName = javaServerPath; + process.StartInfo.Arguments = command; + process.StartInfo.UseShellExecute = false; + process.StartInfo.RedirectStandardOutput = false; + process.StartInfo.RedirectStandardError = false; + process.StartInfo.EnvironmentVariables["JAVA_ARGS"] = "-Xmx256m"; + + //process.OutputDataReceived += (sender, e) => + //{ + // if (!string.IsNullOrEmpty(e.Data)) + // { + // Util.Log("Execute Gfsh stdout: {0}", e.Data); + // } + //}; + //process.ErrorDataReceived += (sender, e) => + //{ + // if (!string.IsNullOrEmpty(e.Data)) + // { + // Util.Log("Execute Gfsh stderr: {0}", e.Data); + // } + //}; + + process.Start(); + + //process.BeginOutputReadLine(); + //process.BeginErrorReadLine(); + + if (!process.WaitForExit(MaxWaitMillis)) { try { - javaProc.Kill(); - } - catch + process.Kill(); + } catch (Exception) { - //ignore } } - Assert.IsTrue(started, "Timed out waiting for " + - "Java cacheserver to 
start.{0}Please check the server logs.", - Environment.NewLine); - m_runningJavaServers[serverNum] = startDir; + + //process.CancelOutputRead(); + //process.CancelOutputRead(); + + return process.ExitCode; } } @@ -2196,8 +2184,6 @@ public static void StopJavaLocator(int locatorNum, bool verifyLocator, bool ssl) if (m_runningLocators.TryGetValue(locatorNum, out startDir)) { Util.Log("Stopping locator {0} in directory {1}.", locatorNum, startDir); - Process javaStopProc; - string javaLocatorPath = m_gfeDir + PathSep + "bin" + PathSep + GeodeName; string sslArgs = String.Empty; if (ssl) { @@ -2209,37 +2195,14 @@ public static void StopJavaLocator(int locatorNum, bool verifyLocator, bool ssl) string propdir = startDir.Replace("--dir=", string.Empty).Trim(); File.Copy(propdir + "/geode.properties", Directory.GetCurrentDirectory() + "/geode.properties", true); } - if (!Util.StartProcess(javaLocatorPath, LocatorStopArgs + startDir + sslArgs, - false, null, true, false, false, true, out javaStopProc)) - { - Assert.Fail("Failed to run the executable: {0}.", - javaLocatorPath); - } - StreamReader outSr = javaStopProc.StandardOutput; - // Wait for cache server to stop - bool stopped = javaStopProc.WaitForExit(MaxWaitMillis); - Util.Log("Output from '{0} stop-locator':{1}{2}", GeodeName, - Environment.NewLine, outSr.ReadToEnd()); - outSr.Close(); - if (!stopped) - { - try - { - javaStopProc.Kill(); - } - catch - { - //ignore - } - } + var exitCode = ExecuteGfsh(LocatorStopArgs + startDir + sslArgs); + //Assert.AreEqual(0, exitCode, "Failed to stop locator."); + if (ssl) { File.Delete(Directory.GetCurrentDirectory() + "/geode.properties"); } - Assert.IsTrue(stopped, "Timed out waiting for " + - "Java locator to stop.{0}Please check the locator logs.", - Environment.NewLine); m_runningLocators.Remove(locatorNum); Util.Log("Locator {0} in directory {1} stopped.", locatorNum, startDir.Replace("--dir=", string.Empty).Trim()); @@ -2269,35 +2232,10 @@ public static void 
StopJavaServer(int serverNum, bool verifyServer) if (m_runningJavaServers.TryGetValue(serverNum, out startDir)) { Util.Log("Stopping server {0} in directory {1}.", serverNum, startDir); - Process javaStopProc; - string javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + JavaServerName; - if (!Util.StartProcess(javaServerPath, JavaServerStopArgs + startDir, - false, null, true, false, false, true, out javaStopProc)) - { - Assert.Fail("Failed to run the java cacheserver executable: {0}.", - javaServerPath); - } - StreamReader outSr = javaStopProc.StandardOutput; - // Wait for cache server to stop - bool stopped = javaStopProc.WaitForExit(MaxWaitMillis); - Util.Log("Output from '{0} stop':{1}{2}", JavaServerName, - Environment.NewLine, outSr.ReadToEnd()); - outSr.Close(); - if (!stopped) - { - try - { - javaStopProc.Kill(); - } - catch - { - //ignore - } - } - Assert.IsTrue(stopped, "Timed out waiting for " + - "Java cacheserver to stop.{0}Please check the server logs.", - Environment.NewLine); + var exitCode = ExecuteGfsh(JavaServerStopArgs + startDir); + //Assert.AreEqual(0, exitCode, "Failed to stop server."); + m_runningJavaServers.Remove(serverNum); Util.Log("Server {0} in directory {1} stopped.", serverNum, startDir.Replace("--dir=", string.Empty).Trim()); diff --git a/tests/cli/DUnitFramework/Util.cs b/tests/cli/DUnitFramework/Util.cs index 95e588c260..0114004d97 100644 --- a/tests/cli/DUnitFramework/Util.cs +++ b/tests/cli/DUnitFramework/Util.cs @@ -811,16 +811,29 @@ public static bool StartProcess(string procPath, string procArgs, return StartProcess(procPath, procArgs, useShell, startDir, redirectStdOut, redirectStdIn, redirectStdErr, false, out proc); } - + public static bool StartProcess(string procPath, string procArgs, bool useShell, string startDir, bool redirectStdOut, bool redirectStdIn, bool redirectStdErr, bool createNoWindow, out Process proc) + { + return StartProcess(procPath, procArgs, useShell, startDir, redirectStdOut, + redirectStdIn, 
redirectStdErr, createNoWindow, new Dictionary(), out proc); + } + + public static bool StartProcess(string procPath, string procArgs, + bool useShell, string startDir, bool redirectStdOut, + bool redirectStdIn, bool redirectStdErr, bool createNoWindow, IDictionary environment, out Process proc) { ProcessStartInfo pInfo = new ProcessStartInfo(procPath, procArgs); // Force launch without a shell. This allows launching FwkClient.exe without a window so tests can run in CI. useShell = false; + foreach(var e in environment) + { + pInfo.EnvironmentVariables[e.Key] = e.Value; + } + if (!useShell) { if (m_externalBBServer != null || createNoWindow) From 36950e36cdb6742236437beddb8e4f89674be398 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 29 Dec 2020 04:13:19 +0000 Subject: [PATCH 130/155] cleanup on failed start/stop --- clicache/integration-test/CacheHelperN.cs | 127 ++++++++++++++-------- 1 file changed, 84 insertions(+), 43 deletions(-) diff --git a/clicache/integration-test/CacheHelperN.cs b/clicache/integration-test/CacheHelperN.cs index 03bf6bafbc..0fcbe55ac6 100644 --- a/clicache/integration-test/CacheHelperN.cs +++ b/clicache/integration-test/CacheHelperN.cs @@ -1843,7 +1843,6 @@ public static void StartJavaLocator(int locatorNum, string startDir, { Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); } - startDir = " --dir=" + startDir; } else { @@ -1873,9 +1872,14 @@ public static void StartJavaLocator(int locatorNum, string startDir, extraLocatorArgs += sslArgs; } - string locatorArgs = LocatorStartArgs + " --name=" + serverName + startDir + extraLocatorArgs + " --http-service-port=0"; + string locatorArgs = LocatorStartArgs + " --name=" + serverName + " --dir=" + startDir + extraLocatorArgs + " --http-service-port=0"; - Assert.AreEqual(0, ExecuteGfsh(locatorArgs), "Failed to start locator."); + var exitCode = ExecuteGfsh(locatorArgs); + if (0 != exitCode) + { + KillPidFile(Path.Combine(startDir, 
"vf.gf.locator.pid")); + } + Assert.AreEqual(0, exitCode, "Failed to start locator."); m_runningLocators[locatorNum] = startDir; if (m_locators == null) @@ -1934,7 +1938,6 @@ public static void StartJavaLocator_MDS(int locatorNum, string startDir, { Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); } - startDir = " --dir=" + startDir; } else { @@ -1955,9 +1958,14 @@ public static void StartJavaLocator_MDS(int locatorNum, string startDir, { extraLocatorArgs = locatorPort; } - string locatorArgs = LocatorStartArgs + " --name=" + serverName + startDir + extraLocatorArgs + " --http-service-port=0"; + string locatorArgs = LocatorStartArgs + " --name=" + serverName + " --dir=" + startDir + extraLocatorArgs + " --http-service-port=0"; - Assert.AreEqual(0, ExecuteGfsh(locatorArgs), "Failed to start locator MDS."); + var exitCode = ExecuteGfsh(locatorArgs); + if (0 != exitCode) + { + KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + } + Assert.AreEqual(0, exitCode, "Failed to start locator MDS."); m_runningLocators[locatorNum] = startDir; if (m_locators == null) @@ -2060,7 +2068,6 @@ public static void StartJavaServer(int serverNum, string startDir, "could not find cache.xml for server number {0}", serverNum); } string cacheXml = m_cacheXmls[serverNum - 1]; - Util.Log("XXXX Cache XML: {0}", cacheXml); string serverName = "Server" + Util.Rand(372468723).ToString(); startDir += serverName; int port = 0; @@ -2088,7 +2095,6 @@ public static void StartJavaServer(int serverNum, string startDir, { Directory.CreateDirectory(startDir); } - startDir = " --dir=" + startDir; } else { @@ -2103,10 +2109,15 @@ public static void StartJavaServer(int serverNum, string startDir, string serverArgs = JavaServerStartArgs + cacheXml + " --name=" + serverName + " --server-port=" + port + " --classpath=" + classpath + - " --log-level=" + m_gfeLogLevel + startDir + + " --log-level=" + m_gfeLogLevel + " --dir=" + startDir + " 
--J=-Dsecurity-log-level=" + m_gfeSecLogLevel + extraServerArgs; - Assert.AreEqual(0, ExecuteGfsh(serverArgs), "Failed to start server."); + var exitCode = ExecuteGfsh(serverArgs); + if (0 != exitCode) + { + KillPidFile(Path.Combine(startDir, "vf.gf.server.pid")); + } + Assert.AreEqual(0, exitCode, "Failed to start server."); m_runningJavaServers[serverNum] = startDir; } @@ -2116,42 +2127,43 @@ public static int ExecuteGfsh(string command) { Util.Log("ExecuteGfsh: {0}", command); - string javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + gfsh; - //Process process; - //Util.StartProcess(javaServerPath, command, false, null, true, true, true, true, environment, out process); - using(Process process = new Process()) + using(var process = new Process()) { + var javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + gfsh; process.StartInfo.FileName = javaServerPath; process.StartInfo.Arguments = command; process.StartInfo.UseShellExecute = false; - process.StartInfo.RedirectStandardOutput = false; - process.StartInfo.RedirectStandardError = false; + process.StartInfo.RedirectStandardOutput = true; + process.StartInfo.RedirectStandardError = true; + process.StartInfo.CreateNoWindow = true; process.StartInfo.EnvironmentVariables["JAVA_ARGS"] = "-Xmx256m"; - //process.OutputDataReceived += (sender, e) => - //{ - // if (!string.IsNullOrEmpty(e.Data)) - // { - // Util.Log("Execute Gfsh stdout: {0}", e.Data); - // } - //}; - //process.ErrorDataReceived += (sender, e) => - //{ - // if (!string.IsNullOrEmpty(e.Data)) - // { - // Util.Log("Execute Gfsh stderr: {0}", e.Data); - // } - //}; + process.OutputDataReceived += (sender, e) => + { + if (!string.IsNullOrEmpty(e.Data)) + { + Util.Log("Execute Gfsh stdout: {0}", e.Data); + } + }; + process.ErrorDataReceived += (sender, e) => + { + if (!string.IsNullOrEmpty(e.Data)) + { + Util.Log("Execute Gfsh stderr: {0}", e.Data); + } + }; process.Start(); - //process.BeginOutputReadLine(); - //process.BeginErrorReadLine(); + 
process.BeginOutputReadLine(); + process.BeginErrorReadLine(); + Util.Log("ExecuteGfsh: Waiting for exit {0}", process.Id); if (!process.WaitForExit(MaxWaitMillis)) { try { + Util.Log("ExecuteGfsh: Timeout, killing {0}", process.Id); process.Kill(); } catch (Exception) { @@ -2161,6 +2173,7 @@ public static int ExecuteGfsh(string command) //process.CancelOutputRead(); //process.CancelOutputRead(); + Util.Log("ExecuteGfsh: Exited {0}", process.Id); return process.ExitCode; } } @@ -2192,20 +2205,21 @@ public static void StopJavaLocator(int locatorNum, bool verifyLocator, bool ssl) sslArgs += " --J=-Djavax.net.ssl.keyStorePassword=gemstone "; sslArgs += " --J=-Djavax.net.ssl.trustStore=" + keystore + "/server_truststore.jks "; sslArgs += " --J=-Djavax.net.ssl.trustStorePassword=gemstone "; - string propdir = startDir.Replace("--dir=", string.Empty).Trim(); - File.Copy(propdir + "/geode.properties", Directory.GetCurrentDirectory() + "/geode.properties", true); + File.Copy(startDir + "/geode.properties", Directory.GetCurrentDirectory() + "/geode.properties", true); } - var exitCode = ExecuteGfsh(LocatorStopArgs + startDir + sslArgs); - //Assert.AreEqual(0, exitCode, "Failed to stop locator."); + var exitCode = ExecuteGfsh(LocatorStopArgs + " --dir=" + startDir + sslArgs); + if (0 != exitCode) + { + KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + } if (ssl) { File.Delete(Directory.GetCurrentDirectory() + "/geode.properties"); } m_runningLocators.Remove(locatorNum); - Util.Log("Locator {0} in directory {1} stopped.", locatorNum, - startDir.Replace("--dir=", string.Empty).Trim()); + Util.Log("Locator {0} in directory {1} stopped.", locatorNum, startDir); } else { @@ -2233,12 +2247,14 @@ public static void StopJavaServer(int serverNum, bool verifyServer) { Util.Log("Stopping server {0} in directory {1}.", serverNum, startDir); - var exitCode = ExecuteGfsh(JavaServerStopArgs + startDir); - //Assert.AreEqual(0, exitCode, "Failed to stop server."); + var exitCode = 
ExecuteGfsh(JavaServerStopArgs + " --dir=" + startDir); + if (0 != exitCode) + { + KillPidFile(Path.Combine(startDir, "vf.gf.server.pid")); + } m_runningJavaServers.Remove(serverNum); - Util.Log("Server {0} in directory {1} stopped.", serverNum, - startDir.Replace("--dir=", string.Empty).Trim()); + Util.Log("Server {0} in directory {1} stopped.", serverNum, startDir); } else { @@ -2251,6 +2267,31 @@ public static void StopJavaServer(int serverNum, bool verifyServer) } } + private static void KillPidFile(string pidFile) + { + if (File.Exists(pidFile)) + { + Util.Log(Util.LogLevel.Info, "PID file {0} found.", pidFile); + var pid = int.Parse(File.ReadAllText(pidFile)); + try + { + using (var process = Process.GetProcessById(pid)) + { + Util.Log(Util.LogLevel.Warning, "Killing process {0}.", pid); + process.Kill(); + if (!process.WaitForExit(MaxWaitMillis)) + { + Util.Log(Util.LogLevel.Error, "Failed to kill {0}.", pid); + } + } + } + catch (ArgumentException) + { + Util.Log(Util.LogLevel.Info, "Process {0} does not exist.", pid); + } + } + } + public static void StopJavaServers() { int[] runningServers = new int[m_runningJavaServers.Count]; @@ -2336,7 +2377,7 @@ public static void EndTest() StopJavaLocators(); ClearEndpoints(); ClearLocators(); - KillJavaProcesses(); + //KillJavaProcesses(); Util.Log("Cache Helper EndTest completed."); } From 55e837f2b8c4d768fc72d666322d4c530244f72b Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 29 Dec 2020 05:09:35 +0000 Subject: [PATCH 131/155] use library random directories to avoid collision --- clicache/integration-test/CacheHelperN.cs | 160 +++++++++------------- 1 file changed, 66 insertions(+), 94 deletions(-) diff --git a/clicache/integration-test/CacheHelperN.cs b/clicache/integration-test/CacheHelperN.cs index 0fcbe55ac6..77e94218f5 100644 --- a/clicache/integration-test/CacheHelperN.cs +++ b/clicache/integration-test/CacheHelperN.cs @@ -311,25 +311,22 @@ public static string TestDir private static bool 
m_localServer = true; private static string m_extraPropertiesFile = null; - private const string DefaultDSName = "dstest"; - private const string DefaultCacheName = "cachetest"; - - private const string gfsh = "gfsh.bat"; - private static Dictionary environment = new Dictionary() { { "JAVA_ARGS", "-Xmx256m" } }; + private static string gfsh = null; private static int JavaMcastPort = -1; private const string JavaServerStartArgs = "start server --max-heap=512m --cache-xml-file="; private const string JavaServerStopArgs = "stop server"; private const string LocatorStartArgs = "start locator --max-heap=512m"; private const string LocatorStopArgs = "stop locator"; private const int MaxWaitMillis = 60000; - private static char PathSep = Path.DirectorySeparatorChar; - + private static string m_testDir = null; private static Dictionary m_runningJavaServers = new Dictionary(); private static Dictionary m_runningLocators = new Dictionary(); private static CacheTransactionManager m_cstxManager = null; + + private static readonly string tempDirectoryRoot = Path.Combine(Directory.GetCurrentDirectory(), Path.GetRandomFileName()); #endregion #region Public accessors @@ -562,10 +559,9 @@ public static void InitConfig(string cacheXml) public static void InitConfig(Properties config, string cacheXml, IAuthInitialize authIntialize) { - //Console.WriteLine(" in InitConfig1 " + System.AppDomain.CurrentDomain.Id); if (cacheXml != null) { - string duplicateXMLFile = Util.Rand(3536776).ToString() + cacheXml; + var duplicateXMLFile = Path.Combine(makeTempDirectory(), cacheXml); createDuplicateXMLFile(cacheXml, duplicateXMLFile); cacheXml = duplicateXMLFile; } @@ -1691,6 +1687,7 @@ public static void SetupJavaServers(bool locators, params string[] cacheXmls) m_gfeDir = Util.GetEnvironmentVariable("GFE_DIR"); Assert.IsNotNull(m_gfeDir, "GFE_DIR is not set."); Assert.IsNotEmpty(m_gfeDir, "GFE_DIR is not set."); + gfsh = Path.Combine(m_gfeDir, "bin", "gfsh.bat"); m_gfeLogLevel = 
Util.GetEnvironmentVariable("GFE_LOGLEVEL"); m_gfeSecLogLevel = Util.GetEnvironmentVariable("GFE_SECLOGLEVEL"); if (m_gfeLogLevel == null || m_gfeLogLevel.Length == 0) @@ -1726,17 +1723,12 @@ public static void SetupJavaServers(bool locators, params string[] cacheXmls) string cacheXml = cacheXmls[i]; Assert.IsNotNull(cacheXml, "cacheXml is not set for Java cacheserver."); Assert.IsNotEmpty(cacheXml, "cacheXml is not set for Java cacheserver."); - string duplicateFile = ""; - // Assume the GFE_DIR is for a local server - if (cacheXml.IndexOf(PathSep) < 0) - { - duplicateFile = Directory.GetCurrentDirectory() + PathSep + Util.Rand(2342350).ToString() + cacheXml; - cacheXml = Directory.GetCurrentDirectory() + PathSep + cacheXml; - createDuplicateXMLFile(cacheXml, duplicateFile); - //:create duplicate xml files - cacheXmls[i] = duplicateFile; - } - + + var duplicateFile = Path.Combine(makeTempDirectory(), cacheXml); + cacheXml = Path.Combine(Directory.GetCurrentDirectory(), cacheXml); + createDuplicateXMLFile(cacheXml, duplicateFile); + cacheXmls[i] = duplicateFile; + // Find the port number from the given cache.xml XmlDocument xmlDoc = new XmlDocument(); xmlDoc.XmlResolver = null; @@ -1801,54 +1793,42 @@ public static void createDuplicateXMLFile(string orignalFilename, string duplica File.WriteAllText(duplicateFilename, cachexmlstring); } - public static void StartJavaLocator(int locatorNum, string startDir) + public static void StartJavaLocator(int locatorNum, string locatorName) { - StartJavaLocator(locatorNum, startDir, null); + StartJavaLocator(locatorNum, locatorName, null); } - public static void StartJavaLocator(int locatorNum, string startDir, + public static void StartJavaLocator(int locatorNum, string locatorName, string extraLocatorArgs) { - StartJavaLocator(locatorNum, startDir, extraLocatorArgs, false); + StartJavaLocator(locatorNum, locatorName, extraLocatorArgs, false); } - public static void StartJavaLocator(int locatorNum, string startDir, + public 
static void StartJavaLocator(int locatorNum, string locatorName, string extraLocatorArgs, bool ssl) { if (m_localServer) { + var startDir = makeTempDirectory(); Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); - string serverName = "Locator" + Util.Rand(64687687).ToString(); - if (startDir != null) + try { - startDir += serverName; - if (!Directory.Exists(startDir)) - { - Directory.CreateDirectory(startDir); - } - try - { - TextWriter tw = new StreamWriter(Directory.GetCurrentDirectory() + "\\" + startDir + "\\geode.properties", false); - tw.WriteLine("locators=localhost[{0}],localhost[{1}],localhost[{2}]", LOCATOR_PORT_1, LOCATOR_PORT_2, LOCATOR_PORT_3); - if (ssl) - { - tw.WriteLine("ssl-enabled=true"); - tw.WriteLine("ssl-require-authentication=true"); - tw.WriteLine("ssl-ciphers=SSL_RSA_WITH_NULL_MD5"); - tw.WriteLine("mcast-port=0"); - } - tw.Close(); - } - catch (Exception ex) + TextWriter tw = new StreamWriter(Path.Combine(startDir, "geode.properties"), false); + tw.WriteLine("locators=localhost[{0}],localhost[{1}],localhost[{2}]", LOCATOR_PORT_1, LOCATOR_PORT_2, LOCATOR_PORT_3); + if (ssl) { - Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); + tw.WriteLine("ssl-enabled=true"); + tw.WriteLine("ssl-require-authentication=true"); + tw.WriteLine("ssl-ciphers=SSL_RSA_WITH_NULL_MD5"); + tw.WriteLine("mcast-port=0"); } + tw.Close(); } - else + catch (Exception ex) { - startDir = string.Empty; + Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); } - + string locatorPort = " --port=" + getLocatorPort(locatorNum); if (extraLocatorArgs != null) { @@ -1872,7 +1852,7 @@ public static void StartJavaLocator(int locatorNum, string startDir, extraLocatorArgs += sslArgs; } - string locatorArgs = LocatorStartArgs + " --name=" + serverName + " --dir=" + startDir + extraLocatorArgs + " --http-service-port=0"; + string locatorArgs = LocatorStartArgs + " --name=" + 
locatorName + " --dir=" + startDir + extraLocatorArgs + " --http-service-port=0"; var exitCode = ExecuteGfsh(locatorArgs); if (0 != exitCode) @@ -1912,36 +1892,24 @@ static int getLocatorPort(int num) } //this is for start locator independetly(will not see each other) - public static void StartJavaLocator_MDS(int locatorNum, string startDir, + public static void StartJavaLocator_MDS(int locatorNum, string locatorName, string extraLocatorArgs, int dsId) { if (m_localServer) { - string serverName = "Locator" + Util.Rand(64687687).ToString(); + var startDir = makeTempDirectory(); Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); - if (startDir != null) + try { - startDir += serverName; - if (!Directory.Exists(startDir)) - { - Directory.CreateDirectory(startDir); - } - try - { - TextWriter tw = new StreamWriter(Directory.GetCurrentDirectory() + "\\" + startDir + "\\geode.properties", false); - //tw.WriteLine("locators=localhost[{0}],localhost[{1}],localhost[{2}]", LOCATOR_PORT_1, LOCATOR_PORT_2, LOCATOR_PORT_3); - tw.WriteLine("distributed-system-id=" + dsId); - tw.WriteLine("mcast-port=0"); - tw.Close(); - } - catch (Exception ex) - { - Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); - } + TextWriter tw = new StreamWriter(Path.Combine(startDir, "geode.properties"), false); + //tw.WriteLine("locators=localhost[{0}],localhost[{1}],localhost[{2}]", LOCATOR_PORT_1, LOCATOR_PORT_2, LOCATOR_PORT_3); + tw.WriteLine("distributed-system-id=" + dsId); + tw.WriteLine("mcast-port=0"); + tw.Close(); } - else + catch (Exception ex) { - startDir = string.Empty; + Assert.Fail("Locator property file creation failed: {0}: {1}", ex.GetType().Name, ex.Message); } if (dsId == 1) @@ -1958,9 +1926,9 @@ public static void StartJavaLocator_MDS(int locatorNum, string startDir, { extraLocatorArgs = locatorPort; } - string locatorArgs = LocatorStartArgs + " --name=" + serverName + " --dir=" + startDir + extraLocatorArgs + " 
--http-service-port=0"; + string locatorArgs = LocatorStartArgs + " --name=" + locatorName + " --dir=" + startDir + extraLocatorArgs + " --http-service-port=0"; - var exitCode = ExecuteGfsh(locatorArgs); + var exitCode = ExecuteGfsh(locatorArgs); if (0 != exitCode) { KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); @@ -1980,12 +1948,12 @@ public static void StartJavaLocator_MDS(int locatorNum, string startDir, } } - public static void StartJavaServerWithLocators(int serverNum, string startDir, int numLocators) + public static void StartJavaServerWithLocators(int serverNum, string serverName, int numLocators) { - StartJavaServerWithLocators(serverNum, startDir, numLocators, false); + StartJavaServerWithLocators(serverNum, serverName, numLocators, false); } - public static void StartJavaServerWithLocators(int serverNum, string startDir, int numLocators, bool ssl) + public static void StartJavaServerWithLocators(int serverNum, string serverName, int numLocators, bool ssl) { string extraServerArgs = "--locators="; for (int locator = 0; locator < numLocators; locator++) @@ -2007,31 +1975,31 @@ public static void StartJavaServerWithLocators(int serverNum, string startDir, i sslArgs += " -J=-Djavax.net.ssl.trustStorePassword=gemstone "; extraServerArgs += sslArgs; } - StartJavaServer(serverNum, startDir, extraServerArgs); + StartJavaServer(serverNum, serverName, extraServerArgs); } //this is to start multiple DS - public static void StartJavaServerWithLocator_MDS(int serverNum, string startDir, int locatorNumber) + public static void StartJavaServerWithLocator_MDS(int serverNum, string serverName, int locatorNumber) { string extraServerArgs = "--locators="; extraServerArgs += "localhost[" + getLocatorPort(locatorNumber) + "]"; - StartJavaServer(serverNum, startDir, extraServerArgs); + StartJavaServer(serverNum, serverName, extraServerArgs); } - public static void StartJavaServer(int serverNum, string startDir) + public static void StartJavaServer(int serverNum, 
string serverName) { - StartJavaServer(serverNum, startDir, null); + StartJavaServer(serverNum, serverName, null); } - public static void StartJavaServerWithLocators(int serverNum, string startDir, + public static void StartJavaServerWithLocators(int serverNum, string serverName, int numLocators, string extraServerArgs) { - StartJavaServerWithLocators(serverNum, startDir, numLocators, extraServerArgs, false); + StartJavaServerWithLocators(serverNum, serverName, numLocators, extraServerArgs, false); } - public static void StartJavaServerWithLocators(int serverNum, string startDir, + public static void StartJavaServerWithLocators(int serverNum, string serverName, int numLocators, string extraServerArgs, bool ssl) { extraServerArgs += " --locators="; @@ -2054,11 +2022,10 @@ public static void StartJavaServerWithLocators(int serverNum, string startDir, sslArgs += " --J=-Djavax.net.ssl.trustStorePassword=gemstone "; extraServerArgs += sslArgs; } - StartJavaServer(serverNum, startDir, extraServerArgs); + StartJavaServer(serverNum, serverName, extraServerArgs); } - public static void StartJavaServer(int serverNum, string startDir, - string extraServerArgs) + public static void StartJavaServer(int serverNum, string serverName, string extraServerArgs) { if (m_localServer) { @@ -2068,8 +2035,7 @@ public static void StartJavaServer(int serverNum, string startDir, "could not find cache.xml for server number {0}", serverNum); } string cacheXml = m_cacheXmls[serverNum - 1]; - string serverName = "Server" + Util.Rand(372468723).ToString(); - startDir += serverName; + var startDir = Path.Combine(makeTempDirectory()); int port = 0; switch (serverNum) { @@ -2123,14 +2089,20 @@ public static void StartJavaServer(int serverNum, string startDir, } } - public static int ExecuteGfsh(string command) + static string makeTempDirectory() + { + var tempDirectory = Path.Combine(tempDirectoryRoot, Path.GetRandomFileName()); + Directory.CreateDirectory(tempDirectory); + return tempDirectory; + } 
+ + static int ExecuteGfsh(string command) { Util.Log("ExecuteGfsh: {0}", command); using(var process = new Process()) { - var javaServerPath = m_gfeDir + PathSep + "bin" + PathSep + gfsh; - process.StartInfo.FileName = javaServerPath; + process.StartInfo.FileName = gfsh; process.StartInfo.Arguments = command; process.StartInfo.UseShellExecute = false; process.StartInfo.RedirectStandardOutput = true; From 22a303d8d0a3d02f3c2f15a04b8f3edfac53507f Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 29 Dec 2020 05:22:26 +0000 Subject: [PATCH 132/155] cleanup --- clicache/integration-test/CacheHelperN.cs | 47 +++++++++++------------ 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/clicache/integration-test/CacheHelperN.cs b/clicache/integration-test/CacheHelperN.cs index 77e94218f5..57e9fc3e15 100644 --- a/clicache/integration-test/CacheHelperN.cs +++ b/clicache/integration-test/CacheHelperN.cs @@ -312,7 +312,6 @@ public static string TestDir private static string m_extraPropertiesFile = null; private static string gfsh = null; - private static int JavaMcastPort = -1; private const string JavaServerStartArgs = "start server --max-heap=512m --cache-xml-file="; private const string JavaServerStopArgs = "stop server"; private const string LocatorStartArgs = "start locator --max-heap=512m"; @@ -561,7 +560,7 @@ public static void InitConfig(Properties config, string cacheXml { if (cacheXml != null) { - var duplicateXMLFile = Path.Combine(makeTempDirectory(), cacheXml); + var duplicateXMLFile = Path.Combine(MakeTempDirectory(), cacheXml); createDuplicateXMLFile(cacheXml, duplicateXMLFile); cacheXml = duplicateXMLFile; } @@ -1709,14 +1708,6 @@ public static void SetupJavaServers(bool locators, params string[] cacheXmls) else if (cacheXmls != null) { // Assume the GFE_DIR is for a local server - if (locators) - { - JavaMcastPort = 0; - } - else - { - JavaMcastPort = Util.Rand(2431, 31123); - } for (int i = 0; i < cacheXmls.Length; i++) { @@ -1724,7 +1715,7 @@ 
public static void SetupJavaServers(bool locators, params string[] cacheXmls) Assert.IsNotNull(cacheXml, "cacheXml is not set for Java cacheserver."); Assert.IsNotEmpty(cacheXml, "cacheXml is not set for Java cacheserver."); - var duplicateFile = Path.Combine(makeTempDirectory(), cacheXml); + var duplicateFile = Path.Combine(MakeTempDirectory(), cacheXml); cacheXml = Path.Combine(Directory.GetCurrentDirectory(), cacheXml); createDuplicateXMLFile(cacheXml, duplicateFile); cacheXmls[i] = duplicateFile; @@ -1787,7 +1778,7 @@ public static void createDuplicateXMLFile(string orignalFilename, string duplica cachexmlstring = cachexmlstring.Replace("LOC_PORT1", LOCATOR_PORT_1.ToString()); cachexmlstring = cachexmlstring.Replace("LOC_PORT2", LOCATOR_PORT_2.ToString()); cachexmlstring = cachexmlstring.Replace("LOC_PORT3", LOCATOR_PORT_3.ToString()); - //cachexmlstring = cachexmlstring.Replace("LOC_PORT4", LOCATOR_PORT_4.ToString()); + cachexmlstring = cachexmlstring.Replace("LOC_PORT4", LOCATOR_PORT_4.ToString()); File.Create(duplicateFilename).Close(); File.WriteAllText(duplicateFilename, cachexmlstring); @@ -1809,7 +1800,7 @@ public static void StartJavaLocator(int locatorNum, string locatorName, { if (m_localServer) { - var startDir = makeTempDirectory(); + var startDir = MakeTempDirectory(); Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); try { @@ -1843,7 +1834,7 @@ public static void StartJavaLocator(int locatorNum, string locatorName, if (ssl) { - string sslArgs = String.Empty; + string sslArgs = string.Empty; string keystore = Util.GetEnvironmentVariable("CPP_TESTOUT") + "/keystore"; sslArgs += " --J=-Djavax.net.ssl.keyStore=" + keystore + "/server_keystore.jks "; sslArgs += " --J=-Djavax.net.ssl.keyStorePassword=gemstone "; @@ -1857,7 +1848,7 @@ public static void StartJavaLocator(int locatorNum, string locatorName, var exitCode = ExecuteGfsh(locatorArgs); if (0 != exitCode) { - KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + 
KillLocatorByPidFile(startDir); } Assert.AreEqual(0, exitCode, "Failed to start locator."); @@ -1874,7 +1865,10 @@ public static void StartJavaLocator(int locatorNum, string locatorName, } } - + private static void KillLocatorByPidFile(string startDir) + { + KillByPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + } static int getLocatorPort(int num) { @@ -1897,7 +1891,7 @@ public static void StartJavaLocator_MDS(int locatorNum, string locatorName, { if (m_localServer) { - var startDir = makeTempDirectory(); + var startDir = MakeTempDirectory(); Util.Log("Starting locator {0} in directory {1}.", locatorNum, startDir); try { @@ -1931,7 +1925,7 @@ public static void StartJavaLocator_MDS(int locatorNum, string locatorName, var exitCode = ExecuteGfsh(locatorArgs); if (0 != exitCode) { - KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + KillLocatorByPidFile(startDir); } Assert.AreEqual(0, exitCode, "Failed to start locator MDS."); @@ -2035,7 +2029,7 @@ public static void StartJavaServer(int serverNum, string serverName, string extr "could not find cache.xml for server number {0}", serverNum); } string cacheXml = m_cacheXmls[serverNum - 1]; - var startDir = Path.Combine(makeTempDirectory()); + var startDir = Path.Combine(MakeTempDirectory()); int port = 0; switch (serverNum) { @@ -2081,7 +2075,7 @@ public static void StartJavaServer(int serverNum, string serverName, string extr var exitCode = ExecuteGfsh(serverArgs); if (0 != exitCode) { - KillPidFile(Path.Combine(startDir, "vf.gf.server.pid")); + KillServerByPidFile(startDir); } Assert.AreEqual(0, exitCode, "Failed to start server."); @@ -2089,7 +2083,12 @@ public static void StartJavaServer(int serverNum, string serverName, string extr } } - static string makeTempDirectory() + private static void KillServerByPidFile(string startDir) + { + KillByPidFile(Path.Combine(startDir, "vf.gf.server.pid")); + } + + static string MakeTempDirectory() { var tempDirectory = Path.Combine(tempDirectoryRoot, 
Path.GetRandomFileName()); Directory.CreateDirectory(tempDirectory); @@ -2183,7 +2182,7 @@ public static void StopJavaLocator(int locatorNum, bool verifyLocator, bool ssl) var exitCode = ExecuteGfsh(LocatorStopArgs + " --dir=" + startDir + sslArgs); if (0 != exitCode) { - KillPidFile(Path.Combine(startDir, "vf.gf.locator.pid")); + KillLocatorByPidFile(startDir); } if (ssl) @@ -2222,7 +2221,7 @@ public static void StopJavaServer(int serverNum, bool verifyServer) var exitCode = ExecuteGfsh(JavaServerStopArgs + " --dir=" + startDir); if (0 != exitCode) { - KillPidFile(Path.Combine(startDir, "vf.gf.server.pid")); + KillServerByPidFile(startDir); } m_runningJavaServers.Remove(serverNum); @@ -2239,7 +2238,7 @@ public static void StopJavaServer(int serverNum, bool verifyServer) } } - private static void KillPidFile(string pidFile) + private static void KillByPidFile(string pidFile) { if (File.Exists(pidFile)) { From 2132b18399c9d766dbdfbdec00fbd097dc5661d8 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 21:37:09 -0800 Subject: [PATCH 133/155] Disable broken IPv6 test. --- cppcache/integration/test/BasicIPv6Test.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cppcache/integration/test/BasicIPv6Test.cpp b/cppcache/integration/test/BasicIPv6Test.cpp index 856ba62529..51e31d94d8 100644 --- a/cppcache/integration/test/BasicIPv6Test.cpp +++ b/cppcache/integration/test/BasicIPv6Test.cpp @@ -53,7 +53,7 @@ std::shared_ptr setupRegion(Cache& cache) { * Example test using 2 servers and waiting for async tasks to synchronize using * furtures. */ -TEST(BasicIPv6Test, queryResultForRange) { +TEST(BasicIPv6Test, DISABLED_queryResultForRange) { Cluster cluster{LocatorCount{1}, ServerCount{1}, UseIpv6(true)}; cluster.start(); cluster.getGfsh() From e7c614118e5d3d121c0e6e2c9ee9b75a9a268300 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 21:38:19 -0800 Subject: [PATCH 134/155] Use CTest retry. 
--- ci/lib/templates.lib.txt | 17 ++++------------- ci/lib/templates.lib.yml | 8 ++++---- 2 files changed, 8 insertions(+), 17 deletions(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index 5a27ce8d71..8b1c0f8060 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -70,34 +70,25 @@ function remote_upload { (@- end @) (@ def run_cpp_unit_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true -remote_shell pkill -u ${INSTANCE_USER} || true -remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure (@- end @) (@ def run_cpp_integration_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true -remote_shell pkill -u ${INSTANCE_USER} || true -remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --rerun-failed -E ^BasicIPv6Test +remote_shell cmake -E chdir build/cppcache/integration/test ctest -C ${CMAKE_CONFIG} -j8 --timeout=500 --output-on-failure --repeat until-pass:4 (@- end @) (@ def run_cpp_legacy_integration_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true -remote_shell pkill -u ${INSTANCE_USER} || true -remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/cppcache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=500 --output-on-failure --repeat until-pass:4 (@- end @) (@ def run_net_unit_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true remote_shell cmake -E chdir build/clicache/test2 ../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.Tests2.dll -parallel all (@- end @) (@ def 
run_net_integration_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true remote_shell cmake -E chdir build/clicache/integration-test2 ../packages/xunit.runner.console.2.4.0/tools/net452/xunit.console.exe ${CMAKE_CONFIG}/Apache.Geode.IntegrationTests2.dll -verbose -maxthreads 6 (@- end @) (@ def run_net_legacy_integration_tests(): -@) -remote_shell taskkill /t /f /fi "username eq ${INSTANCE_USER}" || true -remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=1000 --output-on-failure --rerun-failed +remote_shell cmake -E chdir build/clicache/integration-test ctest -C ${CMAKE_CONFIG} -j6 --timeout=1000 --output-on-failure --repeat until-pass:4 (@- end @) diff --git a/ci/lib/templates.lib.yml b/ci/lib/templates.lib.yml index a47b8145aa..897741745b 100644 --- a/ci/lib/templates.lib.yml +++ b/ci/lib/templates.lib.yml @@ -215,7 +215,7 @@ config: - -c #@yaml/text-templated-strings - | - set -xueo pipefail + set -ueo pipefail (@= remote_functions() @) @@ -285,12 +285,12 @@ config: --- #@ def cpp_integration_test_task(build, config): -#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params, 5) +#@ return remote_task("cpp-integration-tests", config.config, run_cpp_integration_tests(), "30m", build.params) #@ end --- #@ def cpp_legacy_integration_test_task(build, config): -#@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params, 5) +#@ return remote_task("cpp-legacy-integration-tests", config.config, run_cpp_legacy_integration_tests(), "1h", build.params) #@ end --- @@ -305,7 +305,7 @@ config: --- #@ def net_legacy_integration_test_task(build, config): -#@ return remote_task("net-legacy-integration-tests", config.config, run_net_legacy_integration_tests(), "1h", build.params, 5) +#@ return remote_task("net-legacy-integration-tests", config.config, 
run_net_legacy_integration_tests(), "1h", build.params) #@ end --- From ee07d11c8ef9d54163663bfec10b469ebda357dc Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 22:45:12 -0800 Subject: [PATCH 135/155] Always fetch latest cmake. --- packer/linux/install-cmake.sh | 4 +--- packer/linux/install-geode.sh | 2 +- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packer/linux/install-cmake.sh b/packer/linux/install-cmake.sh index 44e6248f74..0c5c1a0557 100644 --- a/packer/linux/install-cmake.sh +++ b/packer/linux/install-cmake.sh @@ -17,11 +17,9 @@ set -x -e -o pipefail -CMAKE_VERSION=3.16.8 - tmp=`mktemp` -curl -o ${tmp} -L https://cmake.org/files/v${CMAKE_VERSION%.*}/cmake-${CMAKE_VERSION}-Linux-x86_64.sh +curl -o ${tmp} -L $(curl -s https://api.github.com/repos/Kitware/CMake/releases/latest | grep 'browser_download_url.*Linux-x86_64\.sh' | cut -d : -f 2,3 | tr -d \") bash ${tmp} --skip-license --prefix=/usr/local diff --git a/packer/linux/install-geode.sh b/packer/linux/install-geode.sh index 50ef1ae51e..b70b960685 100644 --- a/packer/linux/install-geode.sh +++ b/packer/linux/install-geode.sh @@ -17,7 +17,7 @@ set -x -e -o pipefail -GEODE_VERSION=1.12.0 +GEODE_VERSION=1.13.1 cd /usr/local curl -L "https://www.apache.org/dyn/closer.cgi?action=download&filename=geode/${GEODE_VERSION}/apache-geode-${GEODE_VERSION}.tgz" | \ From 70e43190ad2c5f59ee68a775d057709a02520edc Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 22:54:24 -0800 Subject: [PATCH 136/155] Don't be so verbose. 
--- ci/lib/templates.lib.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/lib/templates.lib.txt b/ci/lib/templates.lib.txt index 8b1c0f8060..0dca10dca1 100644 --- a/ci/lib/templates.lib.txt +++ b/ci/lib/templates.lib.txt @@ -60,7 +60,7 @@ function remote_download { } function remote_download_directory { - ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} tar -C $(dirname ${1}) -czf - $(basename ${1}) | tar -C ${2} -zxvf - + ssh ${SSH_OPTIONS} -i ${ssh_key_file} ${INSTANCE_USER}@${external_ip} tar -C $(dirname ${1}) -czf - $(basename ${1}) | tar -C ${2} -zxf - } function remote_upload { From 500facfc02fe2c309065e332e86265142010c250 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 23:25:04 -0800 Subject: [PATCH 137/155] Use the concourse worker's zone by default. --- ci/set-pipeline.sh | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/ci/set-pipeline.sh b/ci/set-pipeline.sh index c0345fb382..ceb0975950 100755 --- a/ci/set-pipeline.sh +++ b/ci/set-pipeline.sh @@ -28,7 +28,7 @@ Parameter Description Default --repository Remote URL for repository. Current tracking branch repository. --pipeline Name of pipeline to set. Based on repository owner name and branch. --google-zone Google Compute project. Current default project. ---google-project Google Compute zone. Current default zone. +--google-project Google Compute zone. Concourse worker's zone. --google-storage-bucket Google Compute Storage bucket. Based on google-project value. --google-storage-key Google Compute Storage key prefix. Based on pipeline value. --fly Path to fly executable. 
"fly" @@ -85,7 +85,7 @@ pipeline=${pipeline:-${git_owner}-${branch}} pipeline=${pipeline//[^[:word:]-]/-} google_project=${google_project:-$(gcloud config get-value project)} -google_zone=${google_zone:-$(gcloud config get-value compute/zone)} +google_zone=${google_zone:-'$(curl "http://metadata.google.internal/computeMetadata/v1/instance/zone" -H "Metadata-Flavor: Google" -s | cut -d / -f 4)'} google_storage_bucket=${google_storage_bucket:-${google_project}-concourse} google_storage_key=${google_storage_key:-geode-native/${pipeline}} @@ -95,22 +95,24 @@ variants_release=${variant_release:-""} for variant in ${variants}; do eval pipeline_suffix=\${variants_${variant}-"-${variant}"} - bash -c "${ytt} \$@" ytt \ + bash -c "${ytt} \"\$@\"" ytt \ --file lib \ --file base \ --file ${variant} \ - --data-value pipeline.name=${pipeline} \ - --data-value pipeline.variant=${variant} \ - --data-value repository.url=${repository} \ - --data-value repository.branch=${branch} \ - --data-value google.project=${google_project} \ - --data-value google.zone=${google_zone} \ - --data-value google.storage.bucket=${google_storage_bucket} \ - --data-value google.storage.key=${google_storage_key} \ - > ${output}/${variant}.yml - - - bash -c "${fly} \$@" fly --target=${target} \ - set-pipeline --pipeline="${pipeline}${pipeline_suffix}" --config=${output}/${variant}.yml + --data-value "pipeline.name=${pipeline}" \ + --data-value "pipeline.variant=${variant}" \ + --data-value "repository.url=${repository}" \ + --data-value "repository.branch=${branch}" \ + --data-value "google.project=${google_project}" \ + --data-value "google.zone=${google_zone}" \ + --data-value "google.storage.bucket=${google_storage_bucket}" \ + --data-value "google.storage.key=${google_storage_key}" \ + > "${output}/${variant}.yml" + + + bash -c "${fly} \"\$@\"" fly --target=${target} \ + set-pipeline \ + "--pipeline=${pipeline}${pipeline_suffix}" \ + "--config=${output}/${variant}.yml" done \ No newline at end of 
file From 753e39fa115b3cfd66e04fb687435790e699b98a Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 23:35:25 -0800 Subject: [PATCH 138/155] Pure virtual interfaces should define virtual destructors. --- cppcache/include/geode/AuthenticatedView.hpp | 2 +- cppcache/include/geode/Cache.hpp | 2 +- cppcache/include/geode/GeodeCache.hpp | 5 ++--- cppcache/include/geode/RegionService.hpp | 2 ++ cppcache/src/Cache.cpp | 1 - 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cppcache/include/geode/AuthenticatedView.hpp b/cppcache/include/geode/AuthenticatedView.hpp index 89ef1fa498..f002885d7f 100644 --- a/cppcache/include/geode/AuthenticatedView.hpp +++ b/cppcache/include/geode/AuthenticatedView.hpp @@ -90,7 +90,7 @@ class APACHE_GEODE_EXPORT AuthenticatedView : public RegionService { /** * @brief destructor */ - virtual ~AuthenticatedView(); + ~AuthenticatedView() override; /** * @brief constructors diff --git a/cppcache/include/geode/Cache.hpp b/cppcache/include/geode/Cache.hpp index 028a6d53d3..0ed5985ebd 100644 --- a/cppcache/include/geode/Cache.hpp +++ b/cppcache/include/geode/Cache.hpp @@ -261,7 +261,7 @@ class APACHE_GEODE_EXPORT Cache : public GeodeCache { LogLevel getLogLevel(); Cache() = delete; - virtual ~Cache(); + ~Cache() override; Cache(const Cache& other) = delete; Cache& operator=(const Cache& other) = delete; Cache(Cache&& other) noexcept; diff --git a/cppcache/include/geode/GeodeCache.hpp b/cppcache/include/geode/GeodeCache.hpp index 85da7329a9..711cec9301 100644 --- a/cppcache/include/geode/GeodeCache.hpp +++ b/cppcache/include/geode/GeodeCache.hpp @@ -46,10 +46,9 @@ class SystemProperties; */ class APACHE_GEODE_EXPORT GeodeCache : public RegionService { - /** - * @brief public methods - */ public: + ~GeodeCache() override = default; + /** Returns the name of this cache. 
* @return the string name of this cache */ diff --git a/cppcache/include/geode/RegionService.hpp b/cppcache/include/geode/RegionService.hpp index c6df9960ed..d1ff18a944 100644 --- a/cppcache/include/geode/RegionService.hpp +++ b/cppcache/include/geode/RegionService.hpp @@ -64,6 +64,8 @@ class APACHE_GEODE_EXPORT RegionService { * @brief public methods */ public: + virtual ~RegionService() = default; + /** * Indicates if this cache has been closed. * After a new cache object is created, this method returns false; diff --git a/cppcache/src/Cache.cpp b/cppcache/src/Cache.cpp index 50552aca44..1112c8de26 100644 --- a/cppcache/src/Cache.cpp +++ b/cppcache/src/Cache.cpp @@ -21,7 +21,6 @@ #include #include #include -#include #include "CacheImpl.hpp" #include "CacheRegionHelper.hpp" From 5a74eb8d4d31c4e2c189cb45984d43f2522a5417 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Mon, 28 Dec 2020 23:35:47 -0800 Subject: [PATCH 139/155] Nothing on CacheImpl should be virtual. --- cppcache/src/CacheImpl.hpp | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/cppcache/src/CacheImpl.hpp b/cppcache/src/CacheImpl.hpp index 62ddecb02e..cb670b226c 100644 --- a/cppcache/src/CacheImpl.hpp +++ b/cppcache/src/CacheImpl.hpp @@ -180,20 +180,14 @@ class APACHE_GEODE_EXPORT CacheImpl { */ std::vector> rootRegions(); - virtual RegionFactory createRegionFactory(RegionShortcut preDefinedRegion); + RegionFactory createRegionFactory(RegionShortcut preDefinedRegion); void initializeDeclarativeCache(const std::string& cacheXml); std::shared_ptr getCacheTransactionManager(); - /** - * @brief destructor - */ - virtual ~CacheImpl(); + ~CacheImpl(); - /** - * @brief constructors - */ CacheImpl(Cache* c, const std::shared_ptr& dsProps, bool ignorePdxUnreadFields, bool readPdxSerialized, const std::shared_ptr& authInitialize); @@ -289,14 +283,14 @@ class APACHE_GEODE_EXPORT CacheImpl { return *(m_statisticsManager.get()); } - virtual DataOutput createDataOutput() 
const; + DataOutput createDataOutput() const; - virtual DataOutput createDataOutput(Pool* pool) const; + DataOutput createDataOutput(Pool* pool) const; - virtual DataInput createDataInput(const uint8_t* buffer, size_t len) const; + DataInput createDataInput(const uint8_t* buffer, size_t len) const; - virtual DataInput createDataInput(const uint8_t* buffer, size_t len, - Pool* pool) const; + DataInput createDataInput(const uint8_t* buffer, size_t len, + Pool* pool) const; PdxInstanceFactory createPdxInstanceFactory(const std::string& className, bool expectDomainClass) const; From a09cb0d29fbb520e1716b86d7fb1ec513aa0d4cb Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Tue, 29 Dec 2020 18:42:34 -0800 Subject: [PATCH 140/155] Done generate debug logs when testing. --- cppcache/integration/test/AuthInitializeTest.cpp | 2 +- cppcache/integration/test/CacheXmlTest.cpp | 2 +- cppcache/integration/test/CqPlusAuthInitializeTest.cpp | 2 +- cppcache/integration/test/FunctionExecutionTest.cpp | 2 +- cppcache/integration/test/PartitionRegionOpsTest.cpp | 2 +- cppcache/integration/test/PdxInstanceFactoryTest.cpp | 2 +- cppcache/integration/test/RegionPutAllTest.cpp | 2 +- cppcache/integration/test/SslTwoWayTest.cpp | 2 +- cppcache/integration/test/TransactionsTest.cpp | 2 +- cppcache/integration/test/WanDeserializationTest.cpp | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/cppcache/integration/test/AuthInitializeTest.cpp b/cppcache/integration/test/AuthInitializeTest.cpp index 7967571f71..af85312b42 100644 --- a/cppcache/integration/test/AuthInitializeTest.cpp +++ b/cppcache/integration/test/AuthInitializeTest.cpp @@ -68,7 +68,7 @@ const int32_t CQ_PLUS_AUTH_TEST_REGION_ENTRY_COUNT = 100000; Cache createCache(std::shared_ptr auth) { auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("log-file", "geode_native.log") .set("statistic-sampling-enabled", "false") .setAuthInitialize(auth) diff --git 
a/cppcache/integration/test/CacheXmlTest.cpp b/cppcache/integration/test/CacheXmlTest.cpp index c70c9f97ea..62be7aa126 100644 --- a/cppcache/integration/test/CacheXmlTest.cpp +++ b/cppcache/integration/test/CacheXmlTest.cpp @@ -44,7 +44,7 @@ apache::geode::client::Cache createCacheUsingXmlConfig( CacheFactory cacheFactory; - auto cache = cacheFactory.set("log-level", "debug") + auto cache = cacheFactory.set("log-level", "none") .set("log-file", "geode_native.log") .set("statistic-sampling-enabled", "false") .set("cache-xml-file", xmlFile.c_str()) diff --git a/cppcache/integration/test/CqPlusAuthInitializeTest.cpp b/cppcache/integration/test/CqPlusAuthInitializeTest.cpp index f29b0383b5..a1358a38c1 100644 --- a/cppcache/integration/test/CqPlusAuthInitializeTest.cpp +++ b/cppcache/integration/test/CqPlusAuthInitializeTest.cpp @@ -64,7 +64,7 @@ const int32_t CQ_PLUS_AUTH_TEST_REGION_ENTRY_COUNT = 50000; Cache createCache(std::shared_ptr auth) { auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("log-file", "geode_native.log") .set("statistic-sampling-enabled", "false") .setAuthInitialize(auth) diff --git a/cppcache/integration/test/FunctionExecutionTest.cpp b/cppcache/integration/test/FunctionExecutionTest.cpp index f1cc760f29..73d8c455e4 100644 --- a/cppcache/integration/test/FunctionExecutionTest.cpp +++ b/cppcache/integration/test/FunctionExecutionTest.cpp @@ -301,7 +301,7 @@ TEST(FunctionExecutionTest, OnServersOneServerGoesDown) { .execute(); }); - auto cache = CacheFactory().set("log-level", "debug").create(); + auto cache = CacheFactory().set("log-level", "none").create(); auto poolFactory = cache.getPoolManager().createFactory(); cluster.applyLocators(poolFactory); diff --git a/cppcache/integration/test/PartitionRegionOpsTest.cpp b/cppcache/integration/test/PartitionRegionOpsTest.cpp index 29989d7fe3..8c9e6cdfc2 100644 --- a/cppcache/integration/test/PartitionRegionOpsTest.cpp +++ 
b/cppcache/integration/test/PartitionRegionOpsTest.cpp @@ -61,7 +61,7 @@ Cache createCache() { using apache::geode::client::CacheFactory; auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("log-file", getClientLogName()) .set("statistic-sampling-enabled", "false") .create(); diff --git a/cppcache/integration/test/PdxInstanceFactoryTest.cpp b/cppcache/integration/test/PdxInstanceFactoryTest.cpp index 348b96ae12..bd3168fe79 100644 --- a/cppcache/integration/test/PdxInstanceFactoryTest.cpp +++ b/cppcache/integration/test/PdxInstanceFactoryTest.cpp @@ -41,7 +41,7 @@ const std::string regionName = "my_region"; std::shared_ptr createCache() { auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .setPdxReadSerialized(true) .create(); return std::make_shared(std::move(cache)); diff --git a/cppcache/integration/test/RegionPutAllTest.cpp b/cppcache/integration/test/RegionPutAllTest.cpp index c487b4e352..7fd7804f11 100644 --- a/cppcache/integration/test/RegionPutAllTest.cpp +++ b/cppcache/integration/test/RegionPutAllTest.cpp @@ -50,7 +50,7 @@ Cache createCache() { using apache::geode::client::CacheFactory; auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("statistic-sampling-enabled", "false") .create(); diff --git a/cppcache/integration/test/SslTwoWayTest.cpp b/cppcache/integration/test/SslTwoWayTest.cpp index 877d3ae9c5..db80baf074 100644 --- a/cppcache/integration/test/SslTwoWayTest.cpp +++ b/cppcache/integration/test/SslTwoWayTest.cpp @@ -84,7 +84,7 @@ TEST_F(SslTwoWayTest, PutGetWithValidSslConfiguration) { (clientSslKeysDir / boost::filesystem::path("client_truststore_chained_root.pem")); auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("log-file", "./gemfire.log") .set("ssl-enabled", "true") .set("ssl-keystore", clientKeystore.string()) diff --git a/cppcache/integration/test/TransactionsTest.cpp 
b/cppcache/integration/test/TransactionsTest.cpp index f0f5b643c5..eacaa040e0 100644 --- a/cppcache/integration/test/TransactionsTest.cpp +++ b/cppcache/integration/test/TransactionsTest.cpp @@ -37,7 +37,7 @@ using apache::geode::client::Region; using apache::geode::client::RegionShortcut; std::shared_ptr createCache() { - auto cache = CacheFactory().set("log-level", "debug").create(); + auto cache = CacheFactory().set("log-level", "none").create(); return std::make_shared(std::move(cache)); } diff --git a/cppcache/integration/test/WanDeserializationTest.cpp b/cppcache/integration/test/WanDeserializationTest.cpp index c34e3ca19f..0b41d298e2 100644 --- a/cppcache/integration/test/WanDeserializationTest.cpp +++ b/cppcache/integration/test/WanDeserializationTest.cpp @@ -67,7 +67,7 @@ Cache createCache(std::string durableClientId) { using apache::geode::client::CacheFactory; auto cache = CacheFactory() - .set("log-level", "debug") + .set("log-level", "none") .set("statistic-sampling-enabled", "false") .setPdxReadSerialized(true) .set("durable-client-id", durableClientId) From 62165c11dc87b53ec81fd115411162007bdb9265 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 31 Dec 2020 00:01:10 -0800 Subject: [PATCH 141/155] clang-tidy. 
--- .clang-tidy | 6 +- .../benchmark/SerializationRegistryBM.cpp | 19 +-- cppcache/include/geode/Cache.hpp | 11 +- .../include/geode/CqServiceStatistics.hpp | 16 +- cppcache/include/geode/FunctionService.hpp | 2 +- cppcache/include/geode/PdxInstance.hpp | 2 +- cppcache/include/geode/PdxUnreadFields.hpp | 4 +- cppcache/include/geode/Properties.hpp | 2 +- .../include/geode/WritablePdxInstance.hpp | 5 +- .../include/geode/internal/geode_base.hpp | 4 +- cppcache/integration-test/.clang-tidy | 20 +-- cppcache/integration-test/BBNamingContext.cpp | 1 + .../BuiltinCacheableWrappers.hpp | 139 +++++---------- cppcache/integration-test/QueryHelper.hpp | 4 +- cppcache/integration-test/TallyListener.hpp | 43 +---- cppcache/integration-test/TallyLoader.hpp | 24 +-- .../ThinClientListenerInit.hpp | 6 +- .../ThinClientLocalCacheLoader.hpp | 8 +- .../ThinClientSecurityHelper.hpp | 4 +- .../ThinClientTransactions.hpp | 21 ++- .../ThinClientTransactionsXA.hpp | 32 ++-- cppcache/integration-test/TimeBomb.hpp | 4 +- cppcache/integration-test/fw_dunit.cpp | 7 +- cppcache/integration-test/fw_dunit.hpp | 36 ++-- cppcache/integration-test/fw_helper.hpp | 2 +- cppcache/integration-test/fw_perf.hpp | 46 +---- cppcache/integration-test/fw_spawn.hpp | 8 +- cppcache/integration-test/testFwPerf.cpp | 6 +- cppcache/integration-test/testLogger.cpp | 5 +- .../testOverflowPutGetSqLite.cpp | 2 +- .../testRegionAccessThreadSafe.cpp | 2 +- cppcache/integration-test/testSpinLock.cpp | 4 +- .../testThinClientConflation.cpp | 6 +- .../integration-test/testThinClientCq.cpp | 22 ++- .../testThinClientCqDelta.cpp | 2 +- .../testThinClientCqFailover.cpp | 2 +- .../testThinClientCqHAFailover.cpp | 2 +- .../testThinClientHAQueryFailover.cpp | 2 +- .../testThinClientInterest1Cacheless.cpp | 4 +- .../testThinClientInterestNotify.cpp | 11 +- .../testThinClientListenerCallbackArgTest.cpp | 20 +-- .../testThinClientPRSingleHop.cpp | 2 +- .../testThinClientPoolAttrTest.cpp | 2 +- 
...ientPoolExecuteFunctionThrowsException.cpp | 1 - .../testThinClientPoolExecuteHAFunction.cpp | 1 - .../testThinClientPoolServer.cpp | 1 - .../testThinClientRemoteQueryFailover.cpp | 2 +- .../testThinClientRemoteQueryFailoverPdx.cpp | 2 +- .../testThinClientTicket304.cpp | 2 - .../integration-test/testTimedSemaphore.cpp | 3 +- cppcache/integration/framework/.clang-tidy | 15 -- cppcache/integration/framework/Cluster.cpp | 67 ++++---- .../integration/test/AuthInitializeTest.cpp | 8 +- .../test/CqPlusAuthInitializeTest.cpp | 2 +- .../test/PartitionRegionOpsTest.cpp | 4 +- .../integration/test/RegisterKeysTest.cpp | 2 +- .../integration/test/TransactionsTest.cpp | 8 +- cppcache/src/AdminRegion.cpp | 9 +- cppcache/src/AuthenticatedView.cpp | 2 - cppcache/src/CacheXmlParser.hpp | 20 ++- cppcache/src/ConcurrentEntriesMap.cpp | 3 +- cppcache/src/ConcurrentEntriesMap.hpp | 125 +++++++------- cppcache/src/ConnectionQueue.hpp | 2 +- cppcache/src/CqEventImpl.hpp | 15 +- cppcache/src/CqQueryImpl.cpp | 9 +- cppcache/src/CqQueryVsdStats.cpp | 2 +- cppcache/src/CqQueryVsdStats.hpp | 12 +- cppcache/src/CqService.cpp | 8 +- cppcache/src/CqServiceVsdStats.cpp | 2 +- cppcache/src/CqServiceVsdStats.hpp | 25 +-- cppcache/src/EntryExpiryHandler.hpp | 26 ++- cppcache/src/ExpMapEntry.hpp | 40 ++--- cppcache/src/ExpiryTaskManager.cpp | 10 +- cppcache/src/ExpiryTaskManager.hpp | 18 +- cppcache/src/FunctionServiceImpl.cpp | 39 ----- cppcache/src/FunctionServiceImpl.hpp | 60 ------- cppcache/src/LRUAction.hpp | 38 ++--- cppcache/src/LRUEntriesMap.cpp | 13 +- cppcache/src/LRUEntriesMap.hpp | 84 ++++----- cppcache/src/LRUExpMapEntry.hpp | 47 +++-- cppcache/src/LRULocalDestroyAction.hpp | 17 +- cppcache/src/LRUMapEntry.hpp | 54 +++--- cppcache/src/LocalRegion.cpp | 6 +- cppcache/src/LocalRegion.hpp | 34 ++-- cppcache/src/MapEntry.hpp | 30 ++-- cppcache/src/PdxRemotePreservedData.hpp | 31 ++-- cppcache/src/RemoteQuery.cpp | 7 +- cppcache/src/SerializationRegistry.cpp | 1 - 
cppcache/src/ServerLocation.hpp | 7 +- cppcache/src/SystemProperties.cpp | 13 +- cppcache/src/TXId.cpp | 8 +- cppcache/src/TXId.hpp | 18 +- cppcache/src/TcrChunkedContext.hpp | 2 +- cppcache/src/TcrConnection.cpp | 38 +---- cppcache/src/TcrConnection.hpp | 12 -- cppcache/src/TcrConnectionManager.cpp | 29 +--- cppcache/src/TcrConnectionManager.hpp | 2 - cppcache/src/TcrDistributionManager.hpp | 31 ++-- cppcache/src/TcrEndpoint.cpp | 2 + cppcache/src/ThinClientBaseDM.cpp | 11 +- cppcache/src/ThinClientBaseDM.hpp | 2 +- .../ThinClientCacheDistributionManager.hpp | 37 ++-- .../src/ThinClientDistributionManager.hpp | 3 +- cppcache/src/ThinClientPoolDM.cpp | 11 +- cppcache/src/ThinClientPoolDM.hpp | 8 +- cppcache/src/ThinClientPoolHADM.cpp | 24 +-- cppcache/src/ThinClientPoolStickyHADM.hpp | 15 +- cppcache/src/ThinClientRedundancyManager.cpp | 10 +- cppcache/src/ThinClientRegion.cpp | 66 ++------ cppcache/src/ThinClientRegion.hpp | 160 ++++++++---------- cppcache/src/ThreadPool.hpp | 2 +- cppcache/src/TrackedMapEntry.hpp | 2 +- cppcache/src/VersionStamp.hpp | 5 +- .../src/VersionedCacheableObjectPartList.cpp | 3 +- cppcache/src/statistics/OsStatisticsImpl.cpp | 32 ++-- cppcache/test/CacheableDateTest.cpp | 8 +- cppcache/test/CacheableKeyCreateTests.cpp | 36 ++-- cppcache/test/CacheableKeysTest.cpp | 8 +- cppcache/test/ChunkedHeaderTest.cpp | 2 +- cppcache/test/DataInputTest.cpp | 87 ++++++---- cppcache/test/DataOutputTest.cpp | 25 +-- cppcache/test/PdxInstanceImplTest.cpp | 4 +- cppcache/test/SerializableCreateTests.cpp | 28 +-- cppcache/test/TcrMessageTest.cpp | 72 ++++---- cppcache/test/ThreadPoolTest.cpp | 2 +- cppcache/test/util/chrono/durationTest.cpp | 12 +- cppcache/test/util/functionalTests.cpp | 2 +- cppcache/test/util/queueTest.cpp | 2 +- cppcache/test/util/synchronized_mapTest.cpp | 36 ++-- cppcache/test/util/synchronized_setTest.cpp | 42 ++--- tests/cpp/.clang-tidy | 4 + tests/cpp/fwklib/FwkBBServer.hpp | 8 +- tests/cpp/fwklib/FwkLog.hpp | 43 ++--- 
tests/cpp/fwklib/Service.hpp | 7 +- tests/cpp/fwklib/TimeBomb.hpp | 4 +- tests/cpp/fwklib/UDPIpc.hpp | 43 ++--- .../cpp/security/DummyCredentialGenerator.hpp | 5 +- .../security/XmlAuthzCredentialGenerator.hpp | 12 +- tests/cpp/testobject/VariousPdxTypes.cpp | 1 - 139 files changed, 1010 insertions(+), 1513 deletions(-) delete mode 100644 cppcache/integration/framework/.clang-tidy delete mode 100644 cppcache/src/FunctionServiceImpl.cpp delete mode 100644 cppcache/src/FunctionServiceImpl.hpp create mode 100644 tests/cpp/.clang-tidy diff --git a/.clang-tidy b/.clang-tidy index 6d39dedd8e..1019b3c688 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -1,8 +1,8 @@ --- -Checks: '-*,clang-diagnostic-*,clang-analyzer-*,-clang-analyzer-alpha*,google-*,-google-readability-todo,-google-runtime-references,-google-default-arguments,-clang-analyzer-core.uninitialized.UndefReturn,-clang-analyzer-core.UndefinedBinaryOperatorResult,-clang-analyzer-optin.cplusplus.VirtualCall' +Checks: '-*,clang-diagnostic-*,clang-analyzer-*,-clang-analyzer-alpha*,google-*,-google-readability-todo,-google-runtime-references,-google-default-arguments' WarningsAsErrors: '*' HeaderFilterRegex: '.*' -FormatStyle: file +FormatStyle: file ... # Disable Checks @@ -14,5 +14,3 @@ FormatStyle: file # clang-analyzer-core.uninitialized.UndefReturn - Generates errors in ACE, how do we ignore? # clang-analyzer-core.UndefinedBinaryOperatorResult - Generates errors in ACE, how do we ignore? 
-# TEMP -# clang-analyzer-optin.cplusplus.VirtualCall \ No newline at end of file diff --git a/cppcache/benchmark/SerializationRegistryBM.cpp b/cppcache/benchmark/SerializationRegistryBM.cpp index 89be4ccabc..bf14d85787 100644 --- a/cppcache/benchmark/SerializationRegistryBM.cpp +++ b/cppcache/benchmark/SerializationRegistryBM.cpp @@ -36,12 +36,13 @@ using apache::geode::client::internal::DSFid; class TestPdxClass : public PdxSerializable { public: - TestPdxClass() {} - void fromData(PdxReader&) {} + TestPdxClass() = default; - void toData(PdxWriter&) const {} + void fromData(PdxReader&) override {} - const std::string& getClassName() const { return className; } + void toData(PdxWriter&) const override {} + + const std::string& getClassName() const override { return className; } static std::shared_ptr createDeserializable() { return std::make_shared(); @@ -53,19 +54,15 @@ class TestPdxClass : public PdxSerializable { class TestDataSerializableClass : public DataSerializable { public: - TestDataSerializableClass() {} - void fromData(DataInput&) {} + TestDataSerializableClass() = default; - void toData(DataOutput&) const {} + void fromData(DataInput&) override {} - const std::string& getClassName() const { return className; } + void toData(DataOutput&) const override {} static std::shared_ptr createInstance() { return std::make_shared(); } - - private: - std::string className = "myserializableclass"; }; static void SerializationRegistryBM_findDataSerializablePrimitive( diff --git a/cppcache/include/geode/Cache.hpp b/cppcache/include/geode/Cache.hpp index 0ed5985ebd..15ea3aa036 100644 --- a/cppcache/include/geode/Cache.hpp +++ b/cppcache/include/geode/Cache.hpp @@ -27,10 +27,6 @@ #include "GeodeCache.hpp" #include "internal/geode_globals.hpp" -/** - * @file - */ - namespace apache { namespace geode { namespace client { @@ -67,9 +63,6 @@ enum class RegionShortcut; * */ class APACHE_GEODE_EXPORT Cache : public GeodeCache { - /** - * @brief public methods - */ public: 
/** * Returns the {@link RegionFactory} to create the region. @@ -268,9 +261,6 @@ class APACHE_GEODE_EXPORT Cache : public GeodeCache { Cache& operator=(Cache&& other) noexcept; private: - /** - * @brief constructors - */ Cache(const std::shared_ptr& dsProp, bool ignorePdxUnreadFields, bool readPdxSerialized, const std::shared_ptr& authInitialize); @@ -284,6 +274,7 @@ class APACHE_GEODE_EXPORT Cache : public GeodeCache { friend class CacheXmlCreation; friend class RegionXmlCreation; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/include/geode/CqServiceStatistics.hpp b/cppcache/include/geode/CqServiceStatistics.hpp index 5582dc2c8a..7236f23593 100644 --- a/cppcache/include/geode/CqServiceStatistics.hpp +++ b/cppcache/include/geode/CqServiceStatistics.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_CQSERVICESTATISTICS_H_ -#define GEODE_CQSERVICESTATISTICS_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,11 +15,12 @@ * limitations under the License. */ -#include "internal/geode_globals.hpp" +#pragma once -/** - * @file - */ +#ifndef GEODE_CQSERVICESTATISTICS_H_ +#define GEODE_CQSERVICESTATISTICS_H_ + +#include "internal/geode_globals.hpp" namespace apache { namespace geode { @@ -38,6 +34,8 @@ namespace client { */ class APACHE_GEODE_EXPORT CqServiceStatistics { public: + virtual ~CqServiceStatistics() noexcept = default; + /** * Get the number of CQs currently active. * Active CQs are those which are executing (in running state). 
diff --git a/cppcache/include/geode/FunctionService.hpp b/cppcache/include/geode/FunctionService.hpp index 43a9519f17..8dbb10ae55 100644 --- a/cppcache/include/geode/FunctionService.hpp +++ b/cppcache/include/geode/FunctionService.hpp @@ -134,7 +134,7 @@ class APACHE_GEODE_EXPORT FunctionService { return onServersWithCache(regionService); } - virtual ~FunctionService() {} + virtual ~FunctionService() noexcept = default; private: static Execution onServerWithPool(const std::shared_ptr& pool); diff --git a/cppcache/include/geode/PdxInstance.hpp b/cppcache/include/geode/PdxInstance.hpp index a2758d4eef..2ac3a908ab 100644 --- a/cppcache/include/geode/PdxInstance.hpp +++ b/cppcache/include/geode/PdxInstance.hpp @@ -53,7 +53,7 @@ class APACHE_GEODE_EXPORT PdxInstance : public PdxSerializable { /** * @brief destructor */ - ~PdxInstance() override = default; + ~PdxInstance() noexcept override = default; /** * Deserializes and returns the domain object that this instance represents. diff --git a/cppcache/include/geode/PdxUnreadFields.hpp b/cppcache/include/geode/PdxUnreadFields.hpp index 38541eb6f4..8d5f827755 100644 --- a/cppcache/include/geode/PdxUnreadFields.hpp +++ b/cppcache/include/geode/PdxUnreadFields.hpp @@ -42,8 +42,8 @@ namespace client { **/ class APACHE_GEODE_EXPORT PdxUnreadFields { public: - PdxUnreadFields() {} - virtual ~PdxUnreadFields() {} + PdxUnreadFields() = default; + virtual ~PdxUnreadFields() = default; }; } // namespace client } // namespace geode diff --git a/cppcache/include/geode/Properties.hpp b/cppcache/include/geode/Properties.hpp index 375670c933..40077d71b1 100644 --- a/cppcache/include/geode/Properties.hpp +++ b/cppcache/include/geode/Properties.hpp @@ -55,7 +55,7 @@ class APACHE_GEODE_EXPORT Properties public: virtual void visit(const std::shared_ptr& key, const std::shared_ptr& value) = 0; - virtual ~Visitor() {} + virtual ~Visitor() noexcept = default; }; Properties() = default; diff --git 
a/cppcache/include/geode/WritablePdxInstance.hpp b/cppcache/include/geode/WritablePdxInstance.hpp index 71d232e7b5..6341c977e0 100644 --- a/cppcache/include/geode/WritablePdxInstance.hpp +++ b/cppcache/include/geode/WritablePdxInstance.hpp @@ -39,10 +39,7 @@ class CacheableObjectArray; */ class APACHE_GEODE_EXPORT WritablePdxInstance : public PdxInstance { public: - /** - * @brief destructor - */ - virtual ~WritablePdxInstance() = default; + ~WritablePdxInstance() noexcept override = default; /** * Set the existing named field to the given value. diff --git a/cppcache/include/geode/internal/geode_base.hpp b/cppcache/include/geode/internal/geode_base.hpp index 26bf2650b5..c253e2bb3c 100644 --- a/cppcache/include/geode/internal/geode_base.hpp +++ b/cppcache/include/geode/internal/geode_base.hpp @@ -62,10 +62,10 @@ /** Deletes array x only if it exists */ #define _GEODE_SAFE_DELETE_ARRAY(x) \ - { \ + do { \ delete[] x; \ x = nullptr; \ - } + } while (0) #include #include diff --git a/cppcache/integration-test/.clang-tidy b/cppcache/integration-test/.clang-tidy index a3b1d36c53..910b72624f 100644 --- a/cppcache/integration-test/.clang-tidy +++ b/cppcache/integration-test/.clang-tidy @@ -1,19 +1,7 @@ --- -Checks: '-*,clang-diagnostic-*,clang-analyzer-*,-clang-analyzer-alpha*,google-*,-google-readability-todo,-google-runtime-references,-google-default-arguments,-clang-analyzer-core.uninitialized.UndefReturn,-clang-analyzer-core.UndefinedBinaryOperatorResult,-clang-analyzer-optin.cplusplus.VirtualCall,-clang-analyzer-cplusplus.NewDeleteLeaks,-google-readability-function-size' -WarningsAsErrors: '*' -HeaderFilterRegex: '.*' -FormatStyle: file +InheritParentConfig: true +Checks: '-google-readability-function-size' ... -# Disable Checks -# google-runtime-references - We have diverged from this rule due to both legacy and disagreement with the rule. -# google-readability-todo - Adds current user name when fix applied. 
-# clang-analyzer-cplusplus.NewDeleteLeaks - Fundamental design flaw in dunit::Task - -# TODO - Fix these checks -# google-default-arguments -# clang-analyzer-core.uninitialized.UndefReturn - Generates errors in ACE, how do we ignore? -# clang-analyzer-core.UndefinedBinaryOperatorResult - Generates errors in ACE, how do we ignore? - -# TEMP -# clang-analyzer-optin.cplusplus.VirtualCall +# google-readability-function-size +# Some tests have large bodies that cause this rule to fail. diff --git a/cppcache/integration-test/BBNamingContext.cpp b/cppcache/integration-test/BBNamingContext.cpp index 0de09f7c82..fa707ef27a 100644 --- a/cppcache/integration-test/BBNamingContext.cpp +++ b/cppcache/integration-test/BBNamingContext.cpp @@ -283,6 +283,7 @@ BBNamingContextServer::BBNamingContextServer() { m_impl = new BBNamingContextServerImpl(); } BBNamingContextServer::~BBNamingContextServer() { + // NOLINTNEXTLINE(clang-analyzer-unix.Malloc): ACE if (m_impl != nullptr) { delete m_impl; m_impl = nullptr; diff --git a/cppcache/integration-test/BuiltinCacheableWrappers.hpp b/cppcache/integration-test/BuiltinCacheableWrappers.hpp index a68f674473..1efa2a1a5e 100644 --- a/cppcache/integration-test/BuiltinCacheableWrappers.hpp +++ b/cppcache/integration-test/BuiltinCacheableWrappers.hpp @@ -334,11 +334,10 @@ class CacheableDateWrapper : public CacheableWrapper { } void initRandomValue(int32_t) override { - int32_t rnd = CacheableHelper::random(INT_MAX); - time_t timeofday = 0; + auto rnd = CacheableHelper::random(INT_MAX); const ACE_Time_Value currentTime = ACE_OS::gettimeofday(); - timeofday = currentTime.sec(); + auto timeofday = currentTime.sec(); time_t epoctime = static_cast(timeofday + (rnd * (rnd % 2 == 0 ? 
1 : -1))); @@ -362,9 +361,9 @@ class CacheableFileNameWrapper : public CacheableWrapper { // CacheableWrapper members - virtual int32_t maxKeys() const { return INT_MAX; } + int32_t maxKeys() const override { return INT_MAX; } - virtual void initKey(int32_t keyIndex, int32_t maxSize) { + void initKey(int32_t keyIndex, int32_t maxSize) override { maxSize %= (0xFFFF + 1); if (maxSize < 11) { maxSize = 11; @@ -385,7 +384,7 @@ class CacheableFileNameWrapper : public CacheableWrapper { m_cacheableObject = CacheableFileName::create(baseStr); } - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize %= (0xFFFF + 1); std::string randStr; CacheableHelper::randomString(maxSize, randStr); @@ -395,7 +394,7 @@ class CacheableFileNameWrapper : public CacheableWrapper { m_cacheableObject = CacheableFileName::create(randStr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto&& obj = std::dynamic_pointer_cast(object); return (obj ? 
CacheableHelper::crc32( reinterpret_cast(obj->value().c_str()), @@ -531,9 +530,9 @@ class CacheableStringWrapper : public CacheableWrapper { // CacheableWrapper members - virtual int32_t maxKeys() const { return INT_MAX; } + int32_t maxKeys() const override { return INT_MAX; } - virtual void initKey(int32_t keyIndex, int32_t maxSize) { + void initKey(int32_t keyIndex, int32_t maxSize) override { maxSize %= (0xFFFF + 1); if (maxSize < 11) { maxSize = 11; @@ -545,14 +544,14 @@ class CacheableStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(baseStr); } - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize %= (0xFFFF + 1); std::string randStr; CacheableHelper::randomString(maxSize, randStr); m_cacheableObject = CacheableString::create(randStr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const CacheableString* obj = dynamic_cast(object.get()); return (obj != nullptr @@ -573,9 +572,9 @@ class CacheableHugeStringWrapper : public CacheableWrapper { // CacheableWrapper members - virtual int32_t maxKeys() const { return INT_MAX; } + int32_t maxKeys() const override { return INT_MAX; } - virtual void initKey(int32_t keyIndex, int32_t maxSize) { + void initKey(int32_t keyIndex, int32_t maxSize) override { if (maxSize <= 0xFFFF) // ensure its larger than 64k { maxSize += (0xFFFF + 1); @@ -587,7 +586,7 @@ class CacheableHugeStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(baseStr); } - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { if (maxSize <= 0xFFFF) // ensure its larger than 64k { maxSize += (0xFFFF + 1); @@ -597,7 +596,7 @@ class CacheableHugeStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(randStr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) 
const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const CacheableString* obj = dynamic_cast(object.get()); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -608,19 +607,15 @@ class CacheableHugeStringWrapper : public CacheableWrapper { class CacheableHugeUnicodeStringWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableHugeUnicodeStringWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableHugeUnicodeStringWrapper(); } - // CacheableWrapper members - - virtual int32_t maxKeys() const { return INT_MAX; } + int32_t maxKeys() const override { return INT_MAX; } - virtual void initKey(int32_t keyIndex, int32_t maxSize) { + void initKey(int32_t keyIndex, int32_t maxSize) override { if (maxSize <= 0xFFFF) // ensure its larger than 64k { maxSize += (0xFFFF + 1); @@ -632,7 +627,7 @@ class CacheableHugeUnicodeStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(baseStr); } - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { if (maxSize <= 0xFFFF) // ensure its larger than 64k { maxSize += (0xFFFF + 1); @@ -642,7 +637,7 @@ class CacheableHugeUnicodeStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(randStr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto&& obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); return CacheableHelper::crc32( @@ -663,9 +658,9 @@ class CacheableUnicodeStringWrapper : public CacheableWrapper { // CacheableWrapper members - virtual int32_t maxKeys() const { return INT_MAX; } + int32_t maxKeys() const override { return INT_MAX; } - virtual void initKey(int32_t keyIndex, int32_t maxSize) { + void initKey(int32_t keyIndex, int32_t maxSize) override { maxSize %= 21800; // so that 
encoded length is within 64k if (maxSize < 11) { maxSize = 11; @@ -677,14 +672,14 @@ class CacheableUnicodeStringWrapper : public CacheableWrapper { m_cacheableObject = CacheableString::create(baseStr); } - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize %= 21800; // so that encoded length is within 64k std::wstring randStr; CacheableHelper::randomString(maxSize, randStr); m_cacheableObject = CacheableString::create(randStr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto&& obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); return CacheableHelper::crc32( @@ -886,15 +881,13 @@ class CacheableBytesWrapper : public CacheableWrapper { static CacheableWrapper* create() { return new CacheableBytesWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { auto randArr = CacheableHelper::randomArray(maxSize, UCHAR_MAX); m_cacheableObject = CacheableBytes::create( std::vector(std::begin(randArr), std::end(randArr))); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const CacheableBytes* obj = dynamic_cast(object.get()); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -904,24 +897,20 @@ class CacheableBytesWrapper : public CacheableWrapper { class CacheableDoubleArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableDoubleArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableDoubleArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize = maxSize / sizeof(double) + 1; auto randArr = 
CacheableHelper::randomArray(maxSize, static_cast(INT_MAX)); m_cacheableObject = CacheableDoubleArray::create(randArr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const auto obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -931,22 +920,18 @@ class CacheableDoubleArrayWrapper : public CacheableWrapper { class CacheableFloatArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableFloatArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableFloatArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize = maxSize / sizeof(float) + 1; auto randArr = CacheableHelper::randomArray(maxSize, static_cast(INT_MAX)); m_cacheableObject = CacheableFloatArray::create(randArr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const auto obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -956,21 +941,17 @@ class CacheableFloatArrayWrapper : public CacheableWrapper { class CacheableInt16ArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableInt16ArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableInt16ArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize = maxSize / sizeof(int16_t) + 1; auto randArr = CacheableHelper::randomArray(maxSize, SHRT_MAX); m_cacheableObject = CacheableInt16Array::create(randArr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr 
object) const override { const auto obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -980,21 +961,17 @@ class CacheableInt16ArrayWrapper : public CacheableWrapper { class CacheableInt32ArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableInt32ArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableInt32ArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize = maxSize / sizeof(int32_t) + 1; auto randArr = CacheableHelper::randomArray(maxSize, INT_MAX); m_cacheableObject = CacheableInt32Array::create(randArr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const auto obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -1004,21 +981,17 @@ class CacheableInt32ArrayWrapper : public CacheableWrapper { class CacheableInt64ArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableInt64ArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableInt64ArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { maxSize = maxSize / sizeof(int64_t) + 1; auto randArr = CacheableHelper::randomArray(maxSize, INT_MAX); m_cacheableObject = CacheableInt64Array::create(randArr); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { const auto obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); @@ -1028,14 +1001,10 @@ class CacheableInt64ArrayWrapper : public CacheableWrapper { class CacheableNullStringWrapper : 
public CacheableWrapper { public: - // Constructor and factory function - CacheableNullStringWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableNullStringWrapper(); } - // CacheableWrapper members - void initRandomValue(int32_t) override { m_cacheableObject = CacheableString::create(static_cast(nullptr)); } @@ -1048,17 +1017,13 @@ class CacheableNullStringWrapper : public CacheableWrapper { class CacheableStringArrayWrapper : public CacheableWrapper { public: - // Constructor and factory function - CacheableStringArrayWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableStringArrayWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { int32_t arraySize = 16; maxSize = maxSize / arraySize; if (maxSize < 2) { @@ -1083,7 +1048,7 @@ class CacheableStringArrayWrapper : public CacheableWrapper { randArr + arraySize)); } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto&& obj = std::dynamic_pointer_cast(object); ASSERT(obj != nullptr, "getCheckSum: null object."); uint32_t checkSum = 0; @@ -1098,40 +1063,16 @@ class CacheableStringArrayWrapper : public CacheableWrapper { } }; -class CacheableUndefinedWrapper : public CacheableWrapper { - public: - // Constructor and factory function - - CacheableUndefinedWrapper() : CacheableWrapper(nullptr) {} - - static CacheableWrapper* create() { return new CacheableUndefinedWrapper(); } - - // CacheableWrapper members - - void initRandomValue(int32_t) override { - m_cacheableObject = std::shared_ptr( - CacheableUndefined::createDeserializable()); - } - - uint32_t getCheckSum(const std::shared_ptr) const override { - return 0; - } -}; - template class CacheableVectorTypeWrapper : public CacheableWrapper { public: - // Constructor and factory function - 
CacheableVectorTypeWrapper() : CacheableWrapper(nullptr) {} static CacheableWrapper* create() { return new CacheableVectorTypeWrapper(); } - // CacheableWrapper members - - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { auto vec = std::dynamic_pointer_cast(VECTTYPE::createDeserializable()); auto valueTypeIds = CacheableWrapperFactory::getRegisteredValueTypes(); @@ -1150,7 +1091,7 @@ class CacheableVectorTypeWrapper : public CacheableWrapper { m_cacheableObject = vec; } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto vec = std::dynamic_pointer_cast(object); ASSERT(vec != nullptr, "getCheckSum: null object."); uint32_t checkSum = 0; @@ -1189,7 +1130,7 @@ class CacheableObjectArrayWrapper : public CacheableWrapper { // CacheableWrapper members - virtual void initRandomValue(int32_t maxSize) { + void initRandomValue(int32_t maxSize) override { auto arr = std::dynamic_pointer_cast( CacheableObjectArray::createDeserializable()); auto valueTypeIds = CacheableWrapperFactory::getRegisteredValueTypes(); @@ -1208,7 +1149,7 @@ class CacheableObjectArrayWrapper : public CacheableWrapper { m_cacheableObject = arr; } - virtual uint32_t getCheckSum(const std::shared_ptr object) const { + uint32_t getCheckSum(const std::shared_ptr object) const override { auto&& arr = std::dynamic_pointer_cast(object); ASSERT(arr != nullptr, "getCheckSum: null object."); uint32_t checkSum = 0; diff --git a/cppcache/integration-test/QueryHelper.hpp b/cppcache/integration-test/QueryHelper.hpp index efe3fd9de0..072581060e 100644 --- a/cppcache/integration-test/QueryHelper.hpp +++ b/cppcache/integration-test/QueryHelper.hpp @@ -51,7 +51,7 @@ #define ROOT_SCOPE LOCAL #endif -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::CacheableKey; using 
apache::geode::client::CacheableStringArray; @@ -88,7 +88,7 @@ class QueryHelper { positionNumSets = 1; } - virtual ~QueryHelper() { ; } + virtual ~QueryHelper() {} virtual void populatePortfolioData( std::shared_ptr& pregion, size_t setSize, size_t numSets, diff --git a/cppcache/integration-test/TallyListener.hpp b/cppcache/integration-test/TallyListener.hpp index f0285aef65..c486a2ed52 100644 --- a/cppcache/integration-test/TallyListener.hpp +++ b/cppcache/integration-test/TallyListener.hpp @@ -69,12 +69,10 @@ class TallyListener : public CacheListener { LOG("TallyListener contructor called"); } - virtual ~TallyListener() {} + ~TallyListener() noexcept override = default; void beQuiet(bool v) { m_quiet = v; } - void ignoreTimeouts(bool ignore) { m_ignoreTimeout = ignore; } - int expectCreates(int expected) { int tries = 0; while ((m_creates < expected) && (tries < 200)) { @@ -99,24 +97,6 @@ class TallyListener : public CacheListener { isCallbackCalled = false; } int getUpdates() { return m_updates; } - int expectInvalidates(int expected) { - LOG("calling expectInvalidates "); - int tries = 0; - while ((m_invalidates < expected) && (tries < 200)) { - SLEEP(100); - tries++; - } - return m_invalidates; - } - int expectDestroys(int expected) { - LOG("calling expectDestroys "); - int tries = 0; - while ((m_destroys < expected) && (tries < 200)) { - SLEEP(100); - tries++; - } - return m_destroys; - } int getInvalidates() { return m_invalidates; } int getDestroys() { return m_destroys; } @@ -142,23 +122,21 @@ class TallyListener : public CacheListener { int getClears() { return m_clears; } - virtual void afterCreate(const EntryEvent& event); + void afterCreate(const EntryEvent& event) override; - virtual void afterUpdate(const EntryEvent& event); + void afterUpdate(const EntryEvent& event) override; - virtual void afterInvalidate(const EntryEvent& event); + void afterInvalidate(const EntryEvent& event) override; - virtual void afterDestroy(const EntryEvent& event); + 
void afterDestroy(const EntryEvent& event) override; - virtual void afterRegionClear(const RegionEvent& event) { + void afterRegionClear(const RegionEvent& event) override { CacheListener::afterRegionClear(event); } - virtual void afterRegionClear(const EntryEvent& event); + void afterRegionInvalidate(const RegionEvent&) override {} - virtual void afterRegionInvalidate(const RegionEvent&) {} - - virtual void afterRegionDestroy(const RegionEvent&) {} + void afterRegionDestroy(const RegionEvent&) override {} void showTallies() { char buf[1024]; @@ -220,11 +198,6 @@ void TallyListener::afterDestroy(const EntryEvent& event) { m_destroys++; checkcallbackArg(event); } -void TallyListener::afterRegionClear(const EntryEvent& event) { - m_clears++; - LOGINFO("TallyListener::afterRegionClear m_clears = %d", m_clears); - checkcallbackArg(event); -} } // namespace testing } // namespace client diff --git a/cppcache/integration-test/TallyLoader.hpp b/cppcache/integration-test/TallyLoader.hpp index 95818492ce..3b932b1700 100644 --- a/cppcache/integration-test/TallyLoader.hpp +++ b/cppcache/integration-test/TallyLoader.hpp @@ -38,10 +38,11 @@ class TallyLoader : virtual public CacheLoader { public: TallyLoader() : CacheLoader(), m_loads(0) {} - virtual ~TallyLoader() = default; + ~TallyLoader() noexcept override = default; - std::shared_ptr load(Region&, const std::shared_ptr&, - const std::shared_ptr&) { + std::shared_ptr load( + Region&, const std::shared_ptr&, + const std::shared_ptr&) override { LOGDEBUG("TallyLoader::load invoked for %d.", m_loads); char buf[1024]; sprintf(buf, "TallyLoader state: (loads = %d)", m_loads); @@ -49,26 +50,11 @@ class TallyLoader : virtual public CacheLoader { return CacheableInt32::create(m_loads++); } - virtual void close(Region&) { LOG("TallyLoader::close"); } - - int expectLoads(int expected) { - int tries = 0; - while ((m_loads < expected) && (tries < 200)) { - SLEEP(100); - tries++; - } - return m_loads; - } + virtual void close(Region&) 
override { LOG("TallyLoader::close"); } int getLoads() { return m_loads; } void reset() { m_loads = 0; } - - void showTallies() { - char buf[1024]; - sprintf(buf, "TallyLoader state: (loads = %d)", getLoads()); - LOG(buf); - } }; } // namespace testing diff --git a/cppcache/integration-test/ThinClientListenerInit.hpp b/cppcache/integration-test/ThinClientListenerInit.hpp index 9c27983343..046cc6b068 100644 --- a/cppcache/integration-test/ThinClientListenerInit.hpp +++ b/cppcache/integration-test/ThinClientListenerInit.hpp @@ -30,7 +30,7 @@ #define CLIENT2 s1p2 #define SERVER1 s2p1 -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::Cacheable; @@ -57,11 +57,11 @@ class ThinClientTallyLoader : public TallyLoader { public: ThinClientTallyLoader() : TallyLoader() {} - virtual ~ThinClientTallyLoader() = default; + ~ThinClientTallyLoader() noexcept override = default; std::shared_ptr load( Region& rp, const std::shared_ptr& key, - const std::shared_ptr& aCallbackArgument) { + const std::shared_ptr& aCallbackArgument) override { int32_t loadValue = std::dynamic_pointer_cast( TallyLoader::load(rp, key, aCallbackArgument)) ->value(); diff --git a/cppcache/integration-test/ThinClientLocalCacheLoader.hpp b/cppcache/integration-test/ThinClientLocalCacheLoader.hpp index 9969ca5686..b74e38a3c0 100644 --- a/cppcache/integration-test/ThinClientLocalCacheLoader.hpp +++ b/cppcache/integration-test/ThinClientLocalCacheLoader.hpp @@ -28,7 +28,7 @@ #define CLIENT1 s1p1 #define SERVER1 s2p1 -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::Cacheable; using apache::geode::client::CacheFactory; @@ -44,11 +44,11 @@ class ThinClientTallyLoader : public TallyLoader { public: ThinClientTallyLoader() : TallyLoader() {} - virtual ~ThinClientTallyLoader() = default; + ~ThinClientTallyLoader() noexcept override = default; std::shared_ptr load( Region& 
rp, const std::shared_ptr& key, - const std::shared_ptr& aCallbackArgument) { + const std::shared_ptr& aCallbackArgument) override { int32_t loadValue = std::dynamic_pointer_cast( TallyLoader::load(rp, key, aCallbackArgument)) ->value(); @@ -64,7 +64,7 @@ class ThinClientTallyLoader : public TallyLoader { return std::move(lreturnValue); } - void close(Region& region) { + void close(Region& region) override { LOG(" ThinClientTallyLoader::close() called"); LOGINFO(" Region %s is Destroyed = %d ", region.getName().c_str(), region.isDestroyed()); diff --git a/cppcache/integration-test/ThinClientSecurityHelper.hpp b/cppcache/integration-test/ThinClientSecurityHelper.hpp index 973ee8cccd..de43ce57b3 100644 --- a/cppcache/integration-test/ThinClientSecurityHelper.hpp +++ b/cppcache/integration-test/ThinClientSecurityHelper.hpp @@ -26,7 +26,7 @@ #include "ThinClientHelper.hpp" #include "hacks/AceThreadId.h" -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::CacheableBoolean; using apache::geode::client::Exception; @@ -215,7 +215,7 @@ class putThread : public ACE_Task_Base { } } - int svc(void) { + int svc(void) override { int ops = 0; auto pid = ACE_OS::getpid(); std::shared_ptr key; diff --git a/cppcache/integration-test/ThinClientTransactions.hpp b/cppcache/integration-test/ThinClientTransactions.hpp index 2e4bec6489..7ef5c157ea 100644 --- a/cppcache/integration-test/ThinClientTransactions.hpp +++ b/cppcache/integration-test/ThinClientTransactions.hpp @@ -34,7 +34,7 @@ #include "CacheHelper.hpp" -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::CacheableKey; using apache::geode::client::CacheableString; @@ -144,7 +144,8 @@ void _verifyEntry(const char* name, const char* key, const char* val, std::dynamic_pointer_cast(regPtr->get(keyPtr)); ASSERT(checkPtr != nullptr, "Value Ptr should not be null."); - LOG("In verify loop, 
get returned " + checkPtr->value() + " for key " + key); + LOG("In verify loop, get returned " + checkPtr->value() + " for key " + + key); if (strcmp(checkPtr->value().c_str(), value) != 0) { testValueCnt++; @@ -360,11 +361,13 @@ const bool USE_ACK = true; const bool NO_ACK = false; #include "LocatorHelper.hpp" #define THREADERRORCHECK(x, y) \ - if (!(x)) { \ - m_isFailed = true; \ - sprintf(m_error, y); \ - return -1; \ - } + do { \ + if (!(x)) { \ + m_isFailed = true; \ + sprintf(m_error, y); \ + return -1; \ + } \ + } while (0) class SuspendTransactionThread : public ACE_Task_Base { private: @@ -376,7 +379,7 @@ class SuspendTransactionThread : public ACE_Task_Base { SuspendTransactionThread(bool sleep, ACE_Auto_Event* txEvent) : m_suspendedTransaction(nullptr), m_sleep(sleep), m_txEvent(txEvent) {} - int svc(void) { + int svc(void) override { char buf[1024]; sprintf(buf, " In SuspendTransactionThread"); LOG(buf); @@ -429,7 +432,7 @@ class ResumeTransactionThread : public ACE_Task_Base { m_isFailed(false), m_txEvent(txEvent) {} - int svc(void) { + int svc(void) override { char buf[1024]; sprintf(buf, "In ResumeTransactionThread"); LOG(buf); diff --git a/cppcache/integration-test/ThinClientTransactionsXA.hpp b/cppcache/integration-test/ThinClientTransactionsXA.hpp index 84fa7845b9..4594775892 100644 --- a/cppcache/integration-test/ThinClientTransactionsXA.hpp +++ b/cppcache/integration-test/ThinClientTransactionsXA.hpp @@ -34,16 +34,16 @@ #include "CacheHelper.hpp" -namespace { // NOLINT(google-build-namespaces) +namespace { // NOLINT(google-build-namespaces) using apache::geode::client::CacheableKey; using apache::geode::client::CacheableString; using apache::geode::client::CacheHelper; using apache::geode::client::CacheServerException; +using apache::geode::client::CacheTransactionManager; using apache::geode::client::EntryExistsException; using apache::geode::client::EntryNotFoundException; using apache::geode::client::IllegalStateException; -using 
apache::geode::client::CacheTransactionManager; using apache::geode::client::Properties; using apache::geode::client::TransactionException; using apache::geode::client::TransactionId; @@ -145,7 +145,8 @@ void _verifyEntry(const char* name, const char* key, const char* val, std::dynamic_pointer_cast(regPtr->get(keyPtr)); ASSERT(checkPtr != nullptr, "Value Ptr should not be null."); - LOG("In verify loop, get returned " + checkPtr->value() + " for key " + key); + LOG("In verify loop, get returned " + checkPtr->value() + " for key " + + key); if (strcmp(checkPtr->value().c_str(), value) != 0) { testValueCnt++; @@ -361,11 +362,13 @@ const bool USE_ACK = true; const bool NO_ACK = false; #include "LocatorHelper.hpp" #define THREADERRORCHECK(x, y) \ - if (!(x)) { \ - m_isFailed = true; \ - sprintf(m_error, y); \ - return -1; \ - } + do { \ + if (!(x)) { \ + m_isFailed = true; \ + sprintf(m_error, y); \ + return -1; \ + } \ + } while (0) class SuspendTransactionThread : public ACE_Task_Base { private: @@ -377,7 +380,7 @@ class SuspendTransactionThread : public ACE_Task_Base { SuspendTransactionThread(bool sleep, ACE_Auto_Event* txEvent) : m_suspendedTransaction(nullptr), m_sleep(sleep), m_txEvent(txEvent) {} - int svc(void) { + int svc(void) override { char buf[1024]; sprintf(buf, " In SuspendTransactionThread"); LOG(buf); @@ -410,9 +413,7 @@ class SuspendTransactionThread : public ACE_Task_Base { } void start() { activate(); } void stop() { wait(); } - TransactionId& getSuspendedTx() { - return *m_suspendedTransaction; - } + TransactionId& getSuspendedTx() { return *m_suspendedTransaction; } }; class ResumeTransactionThread : public ACE_Task_Base { private: @@ -424,16 +425,15 @@ class ResumeTransactionThread : public ACE_Task_Base { ACE_Auto_Event* m_txEvent; public: - ResumeTransactionThread(TransactionId& suspendedTransaction, - bool commit, bool tryResumeWithSleep, - ACE_Auto_Event* txEvent) + ResumeTransactionThread(TransactionId& suspendedTransaction, bool commit, + 
bool tryResumeWithSleep, ACE_Auto_Event* txEvent) : m_suspendedTransaction(suspendedTransaction), m_commit(commit), m_tryResumeWithSleep(tryResumeWithSleep), m_isFailed(false), m_txEvent(txEvent) {} - int svc(void) { + int svc(void) override { char buf[1024]; sprintf(buf, "In ResumeTransactionThread"); LOG(buf); diff --git a/cppcache/integration-test/TimeBomb.hpp b/cppcache/integration-test/TimeBomb.hpp index 97d58f5f25..07ef80074b 100644 --- a/cppcache/integration-test/TimeBomb.hpp +++ b/cppcache/integration-test/TimeBomb.hpp @@ -87,7 +87,7 @@ class TimeBomb : public ACE_Task_Base { return activate(thrAttrs, 1); } - int svc() { + int svc() override { if (m_sleep == ACE_Time_Value(0)) { printf("###### TIMEBOMB Disabled. ######\n"); fflush(stdout); @@ -108,7 +108,7 @@ class TimeBomb : public ACE_Task_Base { return 0; } - ~TimeBomb() {} + ~TimeBomb() noexcept override = default; }; #endif // GEODE_INTEGRATION_TEST_TIMEBOMB_H_ diff --git a/cppcache/integration-test/fw_dunit.cpp b/cppcache/integration-test/fw_dunit.cpp index 257a1f2a3d..fa1350abd5 100644 --- a/cppcache/integration-test/fw_dunit.cpp +++ b/cppcache/integration-test/fw_dunit.cpp @@ -508,7 +508,7 @@ class TestProcess : virtual public dunit::Manager { protected: public: - virtual ~TestProcess() {} + ~TestProcess() noexcept override = default; }; /** @@ -1031,7 +1031,6 @@ PerfSuite::PerfSuite(const char *suiteName) : m_suiteName(suiteName) {} void PerfSuite::addRecord(std::string testName, int64_t ops, const TimeStamp &start, const TimeStamp &stop) { Record tmp(testName, ops, start, stop); - m_records[testName] = tmp; fprintf(stdout, "[PerfSuite] %s\n", tmp.asString().c_str()); fflush(stdout); } @@ -1129,10 +1128,6 @@ ThreadLauncher::~ThreadLauncher() { } } -Thread::Thread() : ACE_Task_Base(), m_launcher(nullptr), m_used(false) {} - -Thread::~Thread() {} - int Thread::svc() { m_used = true; int res = 0; diff --git a/cppcache/integration-test/fw_dunit.hpp b/cppcache/integration-test/fw_dunit.hpp index 
59904f8e79..7b39479861 100644 --- a/cppcache/integration-test/fw_dunit.hpp +++ b/cppcache/integration-test/fw_dunit.hpp @@ -120,18 +120,18 @@ END_TASK(validate) #include #include -#define ASSERT(x, y) \ - do { \ - if (!(x)) { \ - throw dunit::TestException(y, __LINE__, __FILE__); \ - } \ - } while(false) -#define XASSERT(x) \ - do { \ - if (!(x)) { \ - throw dunit::TestException(#x, __LINE__, __FILE__); \ - } \ - } while(false) +#define ASSERT(x, y) \ + do { \ + if (!(x)) { \ + throw dunit::TestException(y, __LINE__, __FILE__); \ + } \ + } while (false) +#define XASSERT(x) \ + do { \ + if (!(x)) { \ + throw dunit::TestException(#x, __LINE__, __FILE__); \ + } \ + } while (false) #define FAIL(y) throw dunit::TestException(y, __LINE__, __FILE__) #define LOG(y) dunit::log(y, __LINE__, __FILE__) #define LOGCOORDINATOR(y) dunit::logCoordinator(y, __LINE__, __FILE__) @@ -155,7 +155,7 @@ END_TASK(validate) DCLASSNAME(y)() { init(x); } \ \ public: \ - virtual void doTask() { \ + void doTask() override { \ static const char* fwtest_Name = DTASKDESC(y, __LINE__); \ try { // Close the class definition produced by DUNIT_TASK macro. @@ -200,10 +200,10 @@ END_TASK(validate) #define DUNIT_TASK_DEFINITION(x, y) \ class DCLASSDEF(y) : virtual public dunit::Task { \ public: \ - DCLASSDEF(y)() { init(x, true); } \ + DCLASSDEF(y)() { init(x, true); } \ \ public: \ - virtual void doTask() { \ + void doTask() override { \ static const char* fwtest_Name = DTASKDESC(y, __LINE__); \ try { #define END_TASK_DEFINITION \ @@ -224,7 +224,9 @@ END_TASK(validate) } \ } \ ; -#define CALL_TASK(y); DCLASSDEF(y) * DVARNAME(y) = new DCLASSDEF(y)() +#define CALL_TASK(y) \ + ; \ + DCLASSDEF(y) * DVARNAME(y) = new DCLASSDEF(y)() #define DUNIT_MAIN \ class DCLASSNAME(Main) { \ @@ -277,7 +279,7 @@ class Task { bool m_isHeapAllocated; Task() {} - virtual ~Task() { } + virtual ~Task() {} /** register task with worker. 
*/ void init(int sId); diff --git a/cppcache/integration-test/fw_helper.hpp b/cppcache/integration-test/fw_helper.hpp index 11d0ace623..59e942a200 100644 --- a/cppcache/integration-test/fw_helper.hpp +++ b/cppcache/integration-test/fw_helper.hpp @@ -236,7 +236,7 @@ int main(int /*argc*/, char** /*argv*/) Test_##x() { init(); } \ \ public: \ - virtual void doTest() { \ + void doTest() override { \ static const char* fwtest_Name = #x; #define END_TEST(x) \ } \ diff --git a/cppcache/integration-test/fw_perf.hpp b/cppcache/integration-test/fw_perf.hpp index af019e6534..a952301808 100644 --- a/cppcache/integration-test/fw_perf.hpp +++ b/cppcache/integration-test/fw_perf.hpp @@ -70,15 +70,14 @@ class Semaphore { volatile int m_count; public: + Semaphore() = delete; explicit Semaphore(int count); ~Semaphore(); + Semaphore(const Semaphore& other) = delete; + Semaphore& operator=(const Semaphore& other) = delete; + void acquire(int t = 1); void release(int t = 1); - - private: - Semaphore(); - Semaphore(const Semaphore& other); - Semaphore& operator=(const Semaphore& other); }; class TimeStamp { @@ -125,12 +124,9 @@ class Record { ~Record(); }; -typedef std::map RecordMap; - class PerfSuite { private: std::string m_suiteName; - RecordMap m_records; public: explicit PerfSuite(const char* suiteName); @@ -193,7 +189,8 @@ class Thread : public ACE_Task_Base { bool m_used; public: - Thread(); + Thread() : ACE_Task_Base(), m_launcher(nullptr), m_used(false) {} + // Unhide function to prevent SunPro Warnings using ACE_Shared_Object::init; void init(ThreadLauncher* l) { @@ -201,7 +198,7 @@ class Thread : public ACE_Task_Base { m_launcher = l; } - ~Thread(); + ~Thread() noexcept override = default; /** called before measurement begins. override to do per thread setup. 
*/ virtual void setup() {} @@ -213,36 +210,9 @@ class Thread : public ACE_Task_Base { */ virtual void cleanup() {} - virtual int svc(); + int svc() override; }; -// class NamingServiceThread -//: public ACE_Task_Base -//{ -// private: -// uint32_t m_port; -// -// void namingService() -// { -// char * argsv[2]; -// char pbuf[32]; -// sprintf( pbuf, "-p %d", 12321 ); -// -// argsv[0] = strdup( pbuf ); -// argsv[1] = 0; -// auto svcObj = ACE_SVC_INVOKE( ACE_Name_Acceptor ); -// -// if ( svcObj->init( 1, argsv ) == -1 ) { -// fprintf( stdout, "Failed to construct the Naming Service." ); -// fflush( stdout ); -// } -// ACE_Reactor::run_event_loop(); -// } -// -// public: -// NamingServiceThread( uint32_t port ) : m_port( port ) {} -// virtual int svc() { };//namingService(); } -//}; } // namespace perf #endif // GEODE_INTEGRATION_TEST_FW_PERF_H_ diff --git a/cppcache/integration-test/fw_spawn.hpp b/cppcache/integration-test/fw_spawn.hpp index e49da7dc49..16ba989fde 100644 --- a/cppcache/integration-test/fw_spawn.hpp +++ b/cppcache/integration-test/fw_spawn.hpp @@ -32,14 +32,14 @@ #if defined(_WIN32) #if (FD_SETSIZE != 1024) -++ + bad fdsetsize... ++++bad fdsetsize... #endif #endif #include #include - namespace dunit { + namespace dunit { // Listing 1 code/ch10 class Manager : virtual public ACE_Process { @@ -95,7 +95,7 @@ // Listing 2 code/ch10 // prepare() is inherited from ACE_Process. 
- virtual int prepare(ACE_Process_Options &options) { + int prepare(ACE_Process_Options &options) override { options.command_line("%s", this->programName_); if (this->setStdHandles(options) == -1 || this->setEnvVariable(options) == -1) { @@ -117,7 +117,7 @@ private: protected: - virtual ~Manager() {} + ~Manager() noexcept override = default; private: ACE_HANDLE outputfd_; diff --git a/cppcache/integration-test/testFwPerf.cpp b/cppcache/integration-test/testFwPerf.cpp index 8615b944f4..e5304e17af 100644 --- a/cppcache/integration-test/testFwPerf.cpp +++ b/cppcache/integration-test/testFwPerf.cpp @@ -24,18 +24,18 @@ class LocalPutTask : public perf::Thread { public: LocalPutTask() : Thread() {} - virtual void setup() { + void setup() override { fprintf(stdout, "performed my setup...\n"); fflush(stdout); } - virtual void perftask() { + void perftask() override { ACE_OS::sleep(1); fprintf(stdout, "perffunc done.\n"); fflush(stdout); } - virtual void cleanup() { + void cleanup() override { fprintf(stdout, "performed my cleanup...\n"); fflush(stdout); } diff --git a/cppcache/integration-test/testLogger.cpp b/cppcache/integration-test/testLogger.cpp index 5645db4835..f3a245d9a7 100644 --- a/cppcache/integration-test/testLogger.cpp +++ b/cppcache/integration-test/testLogger.cpp @@ -31,13 +31,12 @@ using apache::geode::client::LogLevel; int numOfLinesInFile(const char *fname) { char line[2048]; - char *read; int ln_cnt = 0; - FILE *fp = fopen(fname, "r"); + auto fp = fopen(fname, "r"); if (fp == nullptr) { return -1; } - while (!!(read = fgets(line, sizeof line, fp))) { + while (!!(fgets(line, sizeof line, fp))) { printf("%d:%s", ++ln_cnt, line); } diff --git a/cppcache/integration-test/testOverflowPutGetSqLite.cpp b/cppcache/integration-test/testOverflowPutGetSqLite.cpp index 594d0d98ae..6e353598e0 100644 --- a/cppcache/integration-test/testOverflowPutGetSqLite.cpp +++ b/cppcache/integration-test/testOverflowPutGetSqLite.cpp @@ -288,7 +288,7 @@ class PutThread : public 
ACE_Task_Base { PutThread(std::shared_ptr ®Ptr, int min, int max) : m_regPtr(regPtr), m_min(min), m_max(max) {} - int svc(void) { + int svc(void) override { /** put some values into the cache. */ doNput(m_regPtr, m_max, m_min); /** do some gets... printing what we find in the cache. */ diff --git a/cppcache/integration-test/testRegionAccessThreadSafe.cpp b/cppcache/integration-test/testRegionAccessThreadSafe.cpp index cc0530ec26..e9e973c80d 100644 --- a/cppcache/integration-test/testRegionAccessThreadSafe.cpp +++ b/cppcache/integration-test/testRegionAccessThreadSafe.cpp @@ -36,7 +36,7 @@ class GetRegionThread : public ACE_Task_Base { m_regionCreateDone(false), m_subRegionCreateDone(false), m_mutex() {} - int svc(void) { + int svc(void) override { while (m_running == true) { SLEEP(40); try { diff --git a/cppcache/integration-test/testSpinLock.cpp b/cppcache/integration-test/testSpinLock.cpp index ec9306062b..1e99257ff0 100644 --- a/cppcache/integration-test/testSpinLock.cpp +++ b/cppcache/integration-test/testSpinLock.cpp @@ -46,7 +46,7 @@ class ThreadA : public ACE_Task_Base { public: ThreadA() : ACE_Task_Base() {} - int svc() { + int svc() override { { std::lock_guard lk(lock); LOG("ThreadA: Acquired lock x."); @@ -62,7 +62,7 @@ class ThreadB : public ACE_Task_Base { public: ThreadB() : ACE_Task_Base() {} - int svc() { + int svc() override { triggerB->acquire(); { std::lock_guard lk(lock); diff --git a/cppcache/integration-test/testThinClientConflation.cpp b/cppcache/integration-test/testThinClientConflation.cpp index 17954d5f33..b9a7bafa2e 100644 --- a/cppcache/integration-test/testThinClientConflation.cpp +++ b/cppcache/integration-test/testThinClientConflation.cpp @@ -61,11 +61,11 @@ class OperMonitor : public CacheListener { public: OperMonitor() : m_events(0), m_value(0) {} - ~OperMonitor() {} + ~OperMonitor() noexcept override = default; - virtual void afterCreate(const EntryEvent &event) { check(event); } + void afterCreate(const EntryEvent &event) 
override { check(event); } - virtual void afterUpdate(const EntryEvent &event) { check(event); } + void afterUpdate(const EntryEvent &event) override { check(event); } void validate(bool conflation) { LOG("validate called"); diff --git a/cppcache/integration-test/testThinClientCq.cpp b/cppcache/integration-test/testThinClientCq.cpp index 7a32dd3f5a..fbfe4092c9 100644 --- a/cppcache/integration-test/testThinClientCq.cpp +++ b/cppcache/integration-test/testThinClientCq.cpp @@ -16,13 +16,10 @@ */ #include "fw_dunit.hpp" #include -#include #include #include -#include #include #include -#include #include #define ROOT_NAME "TestThinClientCq" @@ -30,7 +27,6 @@ #include "CacheHelper.hpp" -#include "QueryStrings.hpp" #include "QueryHelper.hpp" #include @@ -161,15 +157,15 @@ class MyCqListener : public CqListener { } } - void onEvent(const CqEvent &cqe) { + void onEvent(const CqEvent &cqe) override { // LOG("MyCqListener::OnEvent called"); updateCount(cqe); } - void onError(const CqEvent &cqe) { + void onError(const CqEvent &cqe) override { updateCount(cqe); // LOG("MyCqListener::OnError called"); } - void close() { + void close() override { // LOG("MyCqListener::close called"); } }; @@ -225,20 +221,22 @@ class MyCqStatusListener : public CqStatusListener { } } - void onEvent(const CqEvent &cqe) { + void onEvent(const CqEvent &cqe) override { LOGINFO("MyCqStatusListener::OnEvent %d called", m_id); updateCount(cqe); } - void onError(const CqEvent &cqe) { + void onError(const CqEvent &cqe) override { updateCount(cqe); LOGINFO("MyCqStatusListener::OnError %d called", m_id); } - void close() { LOGINFO("MyCqStatusListener::close %d called", m_id); } - void onCqDisconnected() { + void close() override { + LOGINFO("MyCqStatusListener::close %d called", m_id); + } + void onCqDisconnected() override { LOGINFO("MyCqStatusListener %d got onCqDisconnected", m_id); m_cqsDisconnectedCount++; } - void onCqConnected() { + void onCqConnected() override { LOGINFO("MyCqStatusListener %d got 
onCqConnected", m_id); m_cqsConnectedCount++; } diff --git a/cppcache/integration-test/testThinClientCqDelta.cpp b/cppcache/integration-test/testThinClientCqDelta.cpp index b5577cc3c5..eb3eaea491 100644 --- a/cppcache/integration-test/testThinClientCqDelta.cpp +++ b/cppcache/integration-test/testThinClientCqDelta.cpp @@ -60,7 +60,7 @@ class CqDeltaListener : public CqListener { public: CqDeltaListener() : m_deltaCount(0), m_valueCount(0) {} - virtual void onEvent(const CqEvent &aCqEvent) { + void onEvent(const CqEvent &aCqEvent) override { auto deltaValue = aCqEvent.getDeltaValue(); DeltaTestImpl newValue; auto input = getHelper()->getCache()->createDataInput( diff --git a/cppcache/integration-test/testThinClientCqFailover.cpp b/cppcache/integration-test/testThinClientCqFailover.cpp index 09f56ddc02..fcf7662d72 100644 --- a/cppcache/integration-test/testThinClientCqFailover.cpp +++ b/cppcache/integration-test/testThinClientCqFailover.cpp @@ -94,7 +94,7 @@ class KillServerThread : public ACE_Task_Base { MyCqListener *m_listener; explicit KillServerThread(MyCqListener *listener) : m_running(false), m_listener(listener) {} - int svc(void) { + int svc(void) override { while (m_running == true) { CacheHelper::closeServer(1); LOG("THREAD CLOSED SERVER 1"); diff --git a/cppcache/integration-test/testThinClientCqHAFailover.cpp b/cppcache/integration-test/testThinClientCqHAFailover.cpp index 790f9a699b..d5bebbc453 100644 --- a/cppcache/integration-test/testThinClientCqHAFailover.cpp +++ b/cppcache/integration-test/testThinClientCqHAFailover.cpp @@ -93,7 +93,7 @@ class KillServerThread : public ACE_Task_Base { MyCqListener *m_listener; explicit KillServerThread(MyCqListener *listener) : m_running(false), m_listener(listener) {} - int svc(void) { + int svc(void) override { while (m_running == true) { CacheHelper::closeServer(1); LOG("THREAD CLOSED SERVER 1"); diff --git a/cppcache/integration-test/testThinClientHAQueryFailover.cpp 
b/cppcache/integration-test/testThinClientHAQueryFailover.cpp index 25e46d6f5b..f71c43ebda 100644 --- a/cppcache/integration-test/testThinClientHAQueryFailover.cpp +++ b/cppcache/integration-test/testThinClientHAQueryFailover.cpp @@ -62,7 +62,7 @@ class KillServerThread : public ACE_Task_Base { public: bool m_running; KillServerThread() : m_running(false) {} - int svc(void) { + int svc(void) override { while (m_running == true) { // CacheHelper::initServer( 2, "cacheserver_remoteoql2.xml"); // LOG("THREAD STARTED SERVER 2"); diff --git a/cppcache/integration-test/testThinClientInterest1Cacheless.cpp b/cppcache/integration-test/testThinClientInterest1Cacheless.cpp index 31819a7a63..fee5666270 100644 --- a/cppcache/integration-test/testThinClientInterest1Cacheless.cpp +++ b/cppcache/integration-test/testThinClientInterest1Cacheless.cpp @@ -46,8 +46,8 @@ class MyListener : public CacheListener { } } } - virtual void afterCreate(const EntryEvent &event) { checkEntry(event); } - virtual void afterUpdate(const EntryEvent &event) { checkEntry(event); } + void afterCreate(const EntryEvent &event) override { checkEntry(event); } + void afterUpdate(const EntryEvent &event) override { checkEntry(event); } inline bool gotAll() { for (int i = 0; i < 5; i++) { if (m_gotit[i] == 0) return false; diff --git a/cppcache/integration-test/testThinClientInterestNotify.cpp b/cppcache/integration-test/testThinClientInterestNotify.cpp index 9f8673b3d8..1fc0af2025 100644 --- a/cppcache/integration-test/testThinClientInterestNotify.cpp +++ b/cppcache/integration-test/testThinClientInterestNotify.cpp @@ -76,24 +76,24 @@ class EventListener : public CacheListener { m_destroys(0), m_name(name) {} - ~EventListener() {} + ~EventListener() noexcept override = default; - virtual void afterCreate(const EntryEvent &event) { + void afterCreate(const EntryEvent &event) override { check(event, "afterCreate"); m_creates++; } - virtual void afterUpdate(const EntryEvent &event) { + void afterUpdate(const 
EntryEvent &event) override { check(event, "afterUpdate"); m_updates++; } - virtual void afterInvalidate(const EntryEvent &event) { + void afterInvalidate(const EntryEvent &event) override { check(event, "afterInvalidate"); m_invalidates++; } - virtual void afterDestroy(const EntryEvent &event) { + void afterDestroy(const EntryEvent &event) override { check(event, "afterDestroy"); m_destroys++; } @@ -147,7 +147,6 @@ const char *keysForRegex[] = {"key-regex-1", "key-regex-2", "key-regex-3"}; #include "ThinClientDurableInit.hpp" #include "ThinClientTasks_C2S2.hpp" -#include "LocatorHelper.hpp" void initClientForInterestNotify(std::shared_ptr &mon1, std::shared_ptr &mon2, diff --git a/cppcache/integration-test/testThinClientListenerCallbackArgTest.cpp b/cppcache/integration-test/testThinClientListenerCallbackArgTest.cpp index 5f294ce6d2..5a0d854aa6 100644 --- a/cppcache/integration-test/testThinClientListenerCallbackArgTest.cpp +++ b/cppcache/integration-test/testThinClientListenerCallbackArgTest.cpp @@ -21,12 +21,9 @@ #include "ThinClientHelper.hpp" #include "TallyListener.hpp" #include "TallyWriter.hpp" -#include "testobject/PdxType.hpp" #include "testobject/VariousPdxTypes.hpp" -#include "SerializationRegistry.hpp" #include "CacheRegionHelper.hpp" -#include "CacheImpl.hpp" #define CLIENT1 s1p1 #define CLIENT2 s1p2 @@ -77,7 +74,7 @@ class CallbackListener : public CacheListener { LOG("CallbackListener contructor called"); } - virtual ~CallbackListener() {} + ~CallbackListener() noexcept override = default; int getCreates() { return m_creates; } @@ -142,30 +139,31 @@ class CallbackListener : public CacheListener { check(event.getCallbackArgument(), updateEvent); } - virtual void afterCreate(const EntryEvent &event) { + void afterCreate(const EntryEvent &event) override { checkcallbackArg(event, m_creates); } - virtual void afterUpdate(const EntryEvent &event) { + void afterUpdate(const EntryEvent &event) override { checkcallbackArg(event, m_updates); } - virtual void 
afterInvalidate(const EntryEvent &event) { + void afterInvalidate(const EntryEvent &event) override { checkcallbackArg(event, m_invalidates); } - virtual void afterDestroy(const EntryEvent &event) { + void afterDestroy(const EntryEvent &event) override { checkcallbackArg(event, m_destroys); } - virtual void afterRegionInvalidate(const RegionEvent &event) { + void afterRegionInvalidate(const RegionEvent &event) override { checkcallbackArg(event, m_regionInvalidate); } - virtual void afterRegionDestroy(const RegionEvent &event) { + void afterRegionDestroy(const RegionEvent &event) override { checkcallbackArg(event, m_regionDestroy); } - virtual void afterRegionClear(const RegionEvent &event) { + + void afterRegionClear(const RegionEvent &event) override { checkcallbackArg(event, m_regionClear); } }; diff --git a/cppcache/integration-test/testThinClientPRSingleHop.cpp b/cppcache/integration-test/testThinClientPRSingleHop.cpp index db00235084..4d71a66804 100644 --- a/cppcache/integration-test/testThinClientPRSingleHop.cpp +++ b/cppcache/integration-test/testThinClientPRSingleHop.cpp @@ -125,7 +125,7 @@ class putThread : public ACE_Task_Base { int getFailureCount() { return m_failureCount; } - int svc(void) { + int svc(void) override { std::shared_ptr keyPtr; for (int i = m_min; i < m_max; i++) { if (!m_isWarmUpTask) { diff --git a/cppcache/integration-test/testThinClientPoolAttrTest.cpp b/cppcache/integration-test/testThinClientPoolAttrTest.cpp index 1ea7eadd0e..d57f67ce4e 100644 --- a/cppcache/integration-test/testThinClientPoolAttrTest.cpp +++ b/cppcache/integration-test/testThinClientPoolAttrTest.cpp @@ -51,7 +51,7 @@ class putThread : public ACE_Task_Base { public: explicit putThread(const char *name) : regPtr(getHelper()->getRegion(name)) {} - int svc(void) { + int svc(void) override { // TODO: No. 
of connection should be = minConnection for (int i = 0; i < 10000; i++) { diff --git a/cppcache/integration-test/testThinClientPoolExecuteFunctionThrowsException.cpp b/cppcache/integration-test/testThinClientPoolExecuteFunctionThrowsException.cpp index 50e0727a80..3516783e9f 100644 --- a/cppcache/integration-test/testThinClientPoolExecuteFunctionThrowsException.cpp +++ b/cppcache/integration-test/testThinClientPoolExecuteFunctionThrowsException.cpp @@ -140,7 +140,6 @@ DUNIT_TASK_DEFINITION(CLIENT1, StartC1) // createRegionAndAttachPool(poolRegNames[0],USE_ACK, poolName); auto regPtr0 = createRegionAndAttachPool(poolRegNames[0], USE_ACK); - ; // getHelper()->createRegion( poolRegNames[0], USE_ACK); regPtr0->registerAllKeys(); LOG("Clnt1Init complete."); diff --git a/cppcache/integration-test/testThinClientPoolExecuteHAFunction.cpp b/cppcache/integration-test/testThinClientPoolExecuteHAFunction.cpp index e63bbb6093..76c019acec 100644 --- a/cppcache/integration-test/testThinClientPoolExecuteHAFunction.cpp +++ b/cppcache/integration-test/testThinClientPoolExecuteHAFunction.cpp @@ -151,7 +151,6 @@ DUNIT_TASK_DEFINITION(CLIENT1, StartC1) // createRegionAndAttachPool(poolRegNames[0],USE_ACK, poolName); auto regPtr0 = createRegionAndAttachPool(poolRegNames[0], USE_ACK); - ; // getHelper()->createRegion( poolRegNames[0], USE_ACK); regPtr0->registerAllKeys(); LOG("Clnt1Init complete."); diff --git a/cppcache/integration-test/testThinClientPoolServer.cpp b/cppcache/integration-test/testThinClientPoolServer.cpp index 65c1a2d6ce..f28590b158 100644 --- a/cppcache/integration-test/testThinClientPoolServer.cpp +++ b/cppcache/integration-test/testThinClientPoolServer.cpp @@ -187,7 +187,6 @@ DUNIT_MAIN { CALL_TASK(StartLocator1); CALL_TASK(StartServers); - ; CALL_TASK(StartClient1); CALL_TASK(StartClient2); CALL_TASK(CreateClient1Entries); diff --git a/cppcache/integration-test/testThinClientRemoteQueryFailover.cpp b/cppcache/integration-test/testThinClientRemoteQueryFailover.cpp 
index cae59fa032..7c899913bb 100644 --- a/cppcache/integration-test/testThinClientRemoteQueryFailover.cpp +++ b/cppcache/integration-test/testThinClientRemoteQueryFailover.cpp @@ -57,7 +57,7 @@ class KillServerThread : public ACE_Task_Base { public: bool m_running; KillServerThread() : m_running(false) {} - int svc(void) { + int svc(void) override { while (m_running == true) { CacheHelper::closeServer(1); LOG("THREAD CLOSED SERVER 1"); diff --git a/cppcache/integration-test/testThinClientRemoteQueryFailoverPdx.cpp b/cppcache/integration-test/testThinClientRemoteQueryFailoverPdx.cpp index eb486ac33e..8d09b94a1b 100644 --- a/cppcache/integration-test/testThinClientRemoteQueryFailoverPdx.cpp +++ b/cppcache/integration-test/testThinClientRemoteQueryFailoverPdx.cpp @@ -54,7 +54,7 @@ class KillServerThread : public ACE_Task_Base { public: bool m_running; KillServerThread() : m_running(false) {} - int svc(void) { + int svc(void) override { while (m_running == true) { CacheHelper::closeServer(1); LOG("THREAD CLOSED SERVER 1"); diff --git a/cppcache/integration-test/testThinClientTicket304.cpp b/cppcache/integration-test/testThinClientTicket304.cpp index 69531166ca..14370f2492 100644 --- a/cppcache/integration-test/testThinClientTicket304.cpp +++ b/cppcache/integration-test/testThinClientTicket304.cpp @@ -138,7 +138,6 @@ DUNIT_TASK_DEFINITION(CLIENT2, RegisterRegexClient2) } END_TASK_DEFINITION DUNIT_TASK_DEFINITION(CLIENT1, CreateRegionOnClient1) - ; { auto rptr = getHelper()->getRegion(regionNamesAuth[0]); rptr->localDestroyRegion(); @@ -147,7 +146,6 @@ DUNIT_TASK_DEFINITION(CLIENT1, CreateRegionOnClient1) } END_TASK_DEFINITION DUNIT_TASK_DEFINITION(CLIENT2, CreateRegionOnClient2) - ; { SLEEP(10000); createRegion(regionNamesAuth[0], false, true); diff --git a/cppcache/integration-test/testTimedSemaphore.cpp b/cppcache/integration-test/testTimedSemaphore.cpp index f559e59c23..0c8f5421e9 100644 --- a/cppcache/integration-test/testTimedSemaphore.cpp +++ 
b/cppcache/integration-test/testTimedSemaphore.cpp @@ -26,7 +26,7 @@ class ThreadAcquire : public ACE_Task_Base { m_acquireSecs(acquireSecs), m_status(0) {} - int svc() { + int svc() override { ACE_Time_Value start = ACE_OS::gettimeofday(); ACE_Time_Value interval(m_acquireSecs, 0); // 10 seconds ACE_Time_Value expireAt = start + interval; @@ -122,7 +122,6 @@ BEGIN_TEST(CheckResetAndTimedAcquire) sema.release(); sema.release(); while (sema.tryacquire() != -1) { - ; } thread->activate(); diff --git a/cppcache/integration/framework/.clang-tidy b/cppcache/integration/framework/.clang-tidy deleted file mode 100644 index 7346c5983b..0000000000 --- a/cppcache/integration/framework/.clang-tidy +++ /dev/null @@ -1,15 +0,0 @@ ---- -Checks: '-*,clang-diagnostic-*,clang-analyzer-*,-clang-analyzer-alpha*,google-*,-google-readability-todo,-google-runtime-references,-google-default-arguments,-clang-analyzer-unix.cstring.NullArg,-clang-analyzer-optin.cplusplus.VirtualCall' -WarningsAsErrors: '*' -HeaderFilterRegex: '.*' -FormatStyle: file -... - -# Disable Checks -# google-runtime-references - We have diverged from this rule due to both legacy and disagreement with the rule. -# google-readability-todo - Adds current user name when fix applied. - -# TODO - Fix these checks -# google-default-arguments -# clang-analyzer-unix.cstring.NullArg - Generates errors in Boost, how do we ignore? 
-# clang-analyzer-optin.cplusplus.VirtualCall - Boost.Process diff --git a/cppcache/integration/framework/Cluster.cpp b/cppcache/integration/framework/Cluster.cpp index f5105c8843..d53126dc35 100644 --- a/cppcache/integration/framework/Cluster.cpp +++ b/cppcache/integration/framework/Cluster.cpp @@ -17,8 +17,6 @@ #include "Cluster.h" -#include - #include #include @@ -47,14 +45,14 @@ Locator::Locator(Cluster &cluster, std::vector &locators, name_(std::move(name)), locators_(locators), jmxManagerPort_(jmxManagerPort), - distributedSystemId_(distributedSystemId){ + distributedSystemId_(distributedSystemId) { auto hostname = "localhost"; if (useIPv6) { hostname = "ip6-localhost"; } locatorAddress_ = LocatorAddress{hostname, port}; - for (uint16_t remotePort : remotePorts){ + for (uint16_t remotePort : remotePorts) { remoteLocatorsPorts_.push_back(remotePort); } } @@ -76,7 +74,7 @@ Locator::Locator(Locator &&move) remoteLocatorsPorts_(move.remoteLocatorsPorts_), jmxManagerPort_(move.jmxManagerPort_), started_(move.started_), - distributedSystemId_(move.distributedSystemId_){ + distributedSystemId_(move.distributedSystemId_) { move.started_ = false; } @@ -232,21 +230,26 @@ void Server::stop() { } Cluster::Cluster(LocatorCount initialLocators, ServerCount initialServers, - std::vector &locatorPorts, std::vector &remoteLocatorPort, - uint16_t distributedSystemId) : Cluster( - Name(std::string(::testing::UnitTest::GetInstance() - ->current_test_info() - ->test_case_name()) + - "/DS" + std::to_string(distributedSystemId) + "/" + - ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath(""), - SecurityManager(""), User(""), Password(""), initialLocators, initialServers, - CacheXMLFiles({}), locatorPorts, remoteLocatorPort, distributedSystemId) {} + std::vector &locatorPorts, + std::vector &remoteLocatorPort, + uint16_t distributedSystemId) + : Cluster( + Name(std::string(::testing::UnitTest::GetInstance() + ->current_test_info() + ->test_suite_name()) + + 
"/DS" + std::to_string(distributedSystemId) + "/" + + ::testing::UnitTest::GetInstance()->current_test_info()->name()), + Classpath(""), SecurityManager(""), User(""), Password(""), + initialLocators, initialServers, CacheXMLFiles({}), locatorPorts, + remoteLocatorPort, distributedSystemId) {} Cluster::Cluster(Name name, Classpath classpath, SecurityManager securityManager, User user, Password password, LocatorCount initialLocators, ServerCount initialServers, - CacheXMLFiles cacheXMLFiles, std::vector &locatorPorts, - std::vector &remoteLocatorPort, uint16_t distributedSystemId) + CacheXMLFiles cacheXMLFiles, + std::vector &locatorPorts, + std::vector &remoteLocatorPort, + uint16_t distributedSystemId) : name_(name.get()), classpath_(classpath.get()), securityManager_(securityManager.get()), @@ -255,15 +258,14 @@ Cluster::Cluster(Name name, Classpath classpath, initialLocators_(initialLocators.get()), initialServers_(initialServers.get()), jmxManagerPort_(Framework::getAvailablePort()), - distributedSystemId_(distributedSystemId) - { + distributedSystemId_(distributedSystemId) { cacheXMLFiles_ = cacheXMLFiles.get(); useIPv6_ = false; - for(uint16_t port : locatorPorts){ + for (uint16_t port : locatorPorts) { locatorsPorts_.push_back(port); } - for(uint16_t port : remoteLocatorPort){ + for (uint16_t port : remoteLocatorPort) { remoteLocatorsPorts_.push_back(port); } removeServerDirectory(); @@ -274,7 +276,7 @@ Cluster::Cluster(LocatorCount initialLocators, ServerCount initialServers, : Cluster( Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), initialLocators, initialServers, useIPv6) {} @@ -286,7 +288,7 @@ Cluster::Cluster(LocatorCount initialLocators, ServerCount initialServers, CacheXMLFiles cacheXMLFiles) : name_(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + 
->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), initialLocators_(initialLocators.get()), @@ -417,10 +419,10 @@ void Cluster::start(std::function extraGfshCommands) { locators_.reserve(initialLocators_); for (size_t i = 0; i < initialLocators_; i++) { uint16_t port; - if(locatorsPorts_.empty()){ - port=Framework::getAvailablePort(); - }else{ - port=locatorsPorts_.at(i); + if (locatorsPorts_.empty()) { + port = Framework::getAvailablePort(); + } else { + port = locatorsPorts_.at(i); } locators_.push_back({*this, locators_, @@ -432,10 +434,9 @@ void Cluster::start(std::function extraGfshCommands) { servers_.reserve(initialServers_); std::string xmlFile; for (size_t i = 0; i < initialServers_; i++) { - xmlFile = (cacheXMLFiles_.size() == 0) - ? "" - : cacheXMLFiles_.size() == 1 ? cacheXMLFiles_[0] - : cacheXMLFiles_[i]; + xmlFile = (cacheXMLFiles_.size() == 0) ? "" + : cacheXMLFiles_.size() == 1 ? cacheXMLFiles_[0] + : cacheXMLFiles_[i]; servers_.push_back({*this, locators_, name_ + "/server/" + std::to_string(i), xmlFile, @@ -524,13 +525,13 @@ void Cluster::usePropertiesFile(const std::string propertiesFile) { propertiesFile_ = propertiesFile; } -void Cluster::useSecurityPropertiesFile(const std::string securityPropertiesFile) { +void Cluster::useSecurityPropertiesFile( + const std::string securityPropertiesFile) { useSecurityPropertiesFile_ = true; securityPropertiesFile_ = securityPropertiesFile; } -void Cluster::useHostNameForClients( - const std::string hostName) { +void Cluster::useHostNameForClients(const std::string hostName) { usePropertiesFile_ = true; hostName_ = hostName; } diff --git a/cppcache/integration/test/AuthInitializeTest.cpp b/cppcache/integration/test/AuthInitializeTest.cpp index af85312b42..0666613302 100644 --- a/cppcache/integration/test/AuthInitializeTest.cpp +++ b/cppcache/integration/test/AuthInitializeTest.cpp @@ -101,7 +101,7 @@ TEST(AuthInitializeTest, putGetWithBasicAuth) { Cluster cluster( 
Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath{getFrameworkString(FrameworkVariable::JavaObjectJarPath)}, @@ -133,7 +133,7 @@ TEST(AuthInitializeTest, putWithBadUsername) { Cluster cluster( Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath{getFrameworkString(FrameworkVariable::JavaObjectJarPath)}, @@ -169,7 +169,7 @@ TEST(AuthInitializeTest, putWithBadPassword) { Cluster cluster( Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath{getFrameworkString(FrameworkVariable::JavaObjectJarPath)}, @@ -199,7 +199,7 @@ TEST(AuthInitializeTest, badCredentialsWithSubscriptionEnabled) { Cluster cluster( Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath{getFrameworkString(FrameworkVariable::JavaObjectJarPath)}, diff --git a/cppcache/integration/test/CqPlusAuthInitializeTest.cpp b/cppcache/integration/test/CqPlusAuthInitializeTest.cpp index a1358a38c1..0d23bbc2ae 100644 --- a/cppcache/integration/test/CqPlusAuthInitializeTest.cpp +++ b/cppcache/integration/test/CqPlusAuthInitializeTest.cpp @@ -98,7 +98,7 @@ TEST(CqPlusAuthInitializeTest, putInALoopWhileSubscribedAndAuthenticated) { Cluster cluster( Name(std::string(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()) + + ->test_suite_name()) + "/" + ::testing::UnitTest::GetInstance()->current_test_info()->name()), Classpath{getFrameworkString(FrameworkVariable::JavaObjectJarPath)}, diff --git 
a/cppcache/integration/test/PartitionRegionOpsTest.cpp b/cppcache/integration/test/PartitionRegionOpsTest.cpp index 8c9e6cdfc2..4801604d9a 100644 --- a/cppcache/integration/test/PartitionRegionOpsTest.cpp +++ b/cppcache/integration/test/PartitionRegionOpsTest.cpp @@ -50,7 +50,7 @@ using std::chrono::minutes; std::string getClientLogName() { std::string testSuiteName(::testing::UnitTest::GetInstance() ->current_test_info() - ->test_case_name()); + ->test_suite_name()); std::string testCaseName( ::testing::UnitTest::GetInstance()->current_test_info()->name()); std::string logFileName(testSuiteName + "/" + testCaseName + "/client.log"); @@ -61,7 +61,7 @@ Cache createCache() { using apache::geode::client::CacheFactory; auto cache = CacheFactory() - .set("log-level", "none") + .set("log-level", "debug") // needed for log checking .set("log-file", getClientLogName()) .set("statistic-sampling-enabled", "false") .create(); diff --git a/cppcache/integration/test/RegisterKeysTest.cpp b/cppcache/integration/test/RegisterKeysTest.cpp index 0c1a6fc8d1..4484edf905 100644 --- a/cppcache/integration/test/RegisterKeysTest.cpp +++ b/cppcache/integration/test/RegisterKeysTest.cpp @@ -175,7 +175,7 @@ TEST(RegisterKeysTest, RegisterAllWithConsistencyDisabled) { { std::unique_lock lock(cv_mutex); - EXPECT_EQ(cv.wait_for(lock, std::chrono::seconds(5)), + EXPECT_EQ(cv.wait_for(lock, std::chrono::minutes(1)), std::cv_status::no_timeout); } } diff --git a/cppcache/integration/test/TransactionsTest.cpp b/cppcache/integration/test/TransactionsTest.cpp index eacaa040e0..da37c2f8d8 100644 --- a/cppcache/integration/test/TransactionsTest.cpp +++ b/cppcache/integration/test/TransactionsTest.cpp @@ -18,6 +18,7 @@ #include #include +#include #include #include @@ -52,10 +53,15 @@ std::shared_ptr createPool(Cluster& cluster, void runClientOperations(std::shared_ptr cache, std::shared_ptr region, int minEntryKey, int maxEntryKey, int numTx) { + std::random_device randomDevice; + 
std::default_random_engine randomEngine(randomDevice()); + std::uniform_int_distribution distribution( + minEntryKey, maxEntryKey); + auto transactionManager = cache->getCacheTransactionManager(); for (int i = 0; i < numTx; i++) { - auto theKey = (rand() % (maxEntryKey - minEntryKey)) + minEntryKey; + auto theKey = distribution(randomEngine); std::string theValue = "theValue"; try { transactionManager->begin(); diff --git a/cppcache/src/AdminRegion.cpp b/cppcache/src/AdminRegion.cpp index 7c774f416a..4f217a5127 100644 --- a/cppcache/src/AdminRegion.cpp +++ b/cppcache/src/AdminRegion.cpp @@ -23,7 +23,6 @@ #include "TcrConnectionManager.hpp" #include "ThinClientPoolDM.hpp" #include "ThinClientRegion.hpp" -#include "statistics/StatisticsManager.hpp" #include "util/exception.hpp" namespace apache { @@ -51,14 +50,8 @@ std::shared_ptr AdminRegion::create(CacheImpl* cache, } void AdminRegion::init() { - /*TryWriteGuard _guard(m_rwLock, m_destroyPending); - if (m_destroyPending) { - return; - } - */ // Init distribution manager if it is not a pool - ThinClientPoolDM* pool = dynamic_cast(m_distMngr); - if (pool == nullptr) { + if (m_distMngr && !dynamic_cast(m_distMngr)) { m_distMngr->init(); } } diff --git a/cppcache/src/AuthenticatedView.cpp b/cppcache/src/AuthenticatedView.cpp index 35e6a55c15..888e68c712 100644 --- a/cppcache/src/AuthenticatedView.cpp +++ b/cppcache/src/AuthenticatedView.cpp @@ -22,13 +22,11 @@ #include #include #include -#include #include "CacheImpl.hpp" #include "CacheRegionHelper.hpp" #include "CacheXmlParser.hpp" #include "DistributedSystemImpl.hpp" -#include "FunctionServiceImpl.hpp" #include "ProxyRegion.hpp" #include "ProxyRemoteQueryService.hpp" #include "ThinClientPoolDM.hpp" diff --git a/cppcache/src/CacheXmlParser.hpp b/cppcache/src/CacheXmlParser.hpp index 439a00d283..c57289dc16 100644 --- a/cppcache/src/CacheXmlParser.hpp +++ b/cppcache/src/CacheXmlParser.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_CACHEXMLPARSER_H_ -#define 
GEODE_CACHEXMLPARSER_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. */ +#pragma once + +#ifndef GEODE_CACHEXMLPARSER_H_ +#define GEODE_CACHEXMLPARSER_H_ + #include #include #include @@ -53,10 +53,11 @@ using FactoryLoaderFn = std::function; class CacheXmlParser : public xercesc::DefaultHandler { void startElement(const XMLCh* const uri, const XMLCh* const localname, - const XMLCh* const qname, const xercesc::Attributes& attrs); + const XMLCh* const qname, + const xercesc::Attributes& attrs) override; void endElement(const XMLCh* const uri, const XMLCh* const localname, - const XMLCh* const qname); - void fatalError(const xercesc::SAXParseException&); + const XMLCh* const qname) override; + void fatalError(const xercesc::SAXParseException&) override; std::map> @@ -80,7 +81,7 @@ class CacheXmlParser : public xercesc::DefaultHandler { public: explicit CacheXmlParser(Cache* cache); - ~CacheXmlParser(); + ~CacheXmlParser() override; static CacheXmlParser* parse(const char* cachexml, Cache* cache); void parseFile(const char* filename); void parseMemory(const char* buffer, int size); @@ -170,6 +171,7 @@ class CacheXmlParser : public xercesc::DefaultHandler { } } }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/ConcurrentEntriesMap.cpp b/cppcache/src/ConcurrentEntriesMap.cpp index b1983f4142..cea4e8b0a8 100644 --- a/cppcache/src/ConcurrentEntriesMap.cpp +++ b/cppcache/src/ConcurrentEntriesMap.cpp @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + #include "ConcurrentEntriesMap.hpp" #include @@ -69,7 +70,7 @@ void ConcurrentEntriesMap::clear() { m_size = 0; } -ConcurrentEntriesMap::~ConcurrentEntriesMap() { delete[] m_segments; } +ConcurrentEntriesMap::~ConcurrentEntriesMap() noexcept { delete[] m_segments; } GfErrType ConcurrentEntriesMap::create( const std::shared_ptr& key, diff --git a/cppcache/src/ConcurrentEntriesMap.hpp b/cppcache/src/ConcurrentEntriesMap.hpp index 353d92efa2..bcd0cd56ac 100644 --- a/cppcache/src/ConcurrentEntriesMap.hpp +++ b/cppcache/src/ConcurrentEntriesMap.hpp @@ -19,6 +19,7 @@ #ifndef GEODE_CONCURRENTENTRIESMAP_H_ #define GEODE_CONCURRENTENTRIESMAP_H_ + #include #include @@ -31,12 +32,13 @@ namespace apache { namespace geode { namespace client { + class RegionInternal; /** * @brief Concurrent entries map. */ -class APACHE_GEODE_EXPORT ConcurrentEntriesMap : public EntriesMap { +class ConcurrentEntriesMap : public EntriesMap { protected: ExpiryTaskManager* m_expiryTaskManager; uint8_t m_concurrency; @@ -45,14 +47,13 @@ class APACHE_GEODE_EXPORT ConcurrentEntriesMap : public EntriesMap { RegionInternal* m_region; std::atomic m_numDestroyTrackers; bool m_concurrencyChecksEnabled; - // TODO: hashcode() is invoked 3-4 times -- need a better - // implementation (STLport hash_map?) that will invoke it only once + /** * Return a reference to the segment for which the given key would * be stored. */ - virtual MapSegment* segmentFor( - const std::shared_ptr& key) const { + MapSegment* segmentFor( + const std::shared_ptr& key) const override { return &(m_segments[segmentIdx(key)]); } @@ -80,101 +81,100 @@ class APACHE_GEODE_EXPORT ConcurrentEntriesMap : public EntriesMap { /** * Initialize segments with proper EntryFactory. 
*/ - virtual void open(uint32_t initialCapacity); - - virtual void close(); - - virtual ~ConcurrentEntriesMap(); - - virtual void clear(); - - virtual GfErrType put(const std::shared_ptr& key, - const std::shared_ptr& newValue, - std::shared_ptr& me, - std::shared_ptr& oldValue, int updateCount, - int destroyTracker, - std::shared_ptr versionTag, - bool& isUpdate = EntriesMap::boolVal, - DataInput* delta = nullptr); - virtual GfErrType invalidate(const std::shared_ptr& key, - std::shared_ptr& me, - std::shared_ptr& oldValue, - std::shared_ptr versionTag); - virtual GfErrType create(const std::shared_ptr& key, - const std::shared_ptr& newValue, - std::shared_ptr& me, - std::shared_ptr& oldValue, - int updateCount, int destroyTracker, - std::shared_ptr versionTag); - virtual bool get(const std::shared_ptr& key, - std::shared_ptr& value, - std::shared_ptr& me); + void open(uint32_t initialCapacity) override; + + void close() override; + + ~ConcurrentEntriesMap() noexcept override; + + void clear() override; + + GfErrType put(const std::shared_ptr& key, + const std::shared_ptr& newValue, + std::shared_ptr& me, + std::shared_ptr& oldValue, int updateCount, + int destroyTracker, std::shared_ptr versionTag, + bool& isUpdate = EntriesMap::boolVal, + DataInput* delta = nullptr) override; + GfErrType invalidate(const std::shared_ptr& key, + std::shared_ptr& me, + std::shared_ptr& oldValue, + std::shared_ptr versionTag) override; + GfErrType create(const std::shared_ptr& key, + const std::shared_ptr& newValue, + std::shared_ptr& me, + std::shared_ptr& oldValue, int updateCount, + int destroyTracker, + std::shared_ptr versionTag) override; + bool get(const std::shared_ptr& key, + std::shared_ptr& value, + std::shared_ptr& me) override; /** * @brief get MapEntry for key. 
* TODO: return GfErrType like other methods */ - virtual void getEntry(const std::shared_ptr& key, - std::shared_ptr& result, - std::shared_ptr& value) const; + void getEntry(const std::shared_ptr& key, + std::shared_ptr& result, + std::shared_ptr& value) const override; /** * @brief remove the entry for key from the map. */ - virtual GfErrType remove(const std::shared_ptr& key, - std::shared_ptr& result, - std::shared_ptr& me, int updateCount, - std::shared_ptr versionTag, - bool afterRemote); + GfErrType remove(const std::shared_ptr& key, + std::shared_ptr& result, + std::shared_ptr& me, int updateCount, + std::shared_ptr versionTag, + bool afterRemote) override; /** * @brief return true if there exists an entry for the key. */ - virtual bool containsKey(const std::shared_ptr& key) const; + bool containsKey(const std::shared_ptr& key) const override; /** * @brief return the all the keys in a list. */ - virtual void getKeys( - std::vector>& result) const; + void getKeys( + std::vector>& result) const override; /** * @brief return all the entries in a list. */ - virtual void getEntries( - std::vector>& result) const; + void getEntries( + std::vector>& result) const override; /** * @brief return all values in a list. */ - virtual void getValues(std::vector>& result) const; + void getValues( + std::vector>& result) const override; /** * @brief return the number of entries in the map. 
*/ - virtual uint32_t size() const; + uint32_t size() const override; - virtual int addTrackerForEntry(const std::shared_ptr& key, - std::shared_ptr& oldValue, - bool addIfAbsent, bool failIfPresent, - bool incUpdateCount); + int addTrackerForEntry(const std::shared_ptr& key, + std::shared_ptr& oldValue, bool addIfAbsent, + bool failIfPresent, bool incUpdateCount) override; - virtual void removeTrackerForEntry(const std::shared_ptr& key); + void removeTrackerForEntry(const std::shared_ptr& key) override; - virtual int addTrackerForAllEntries(MapOfUpdateCounters& updateCounterMap, - bool addDestroyTracking); + int addTrackerForAllEntries(MapOfUpdateCounters& updateCounterMap, + bool addDestroyTracking) override; - virtual void removeDestroyTracking(); - virtual void reapTombstones(std::map& gcVersions); + void removeDestroyTracking() override; + void reapTombstones(std::map& gcVersions) override; - virtual void reapTombstones(std::shared_ptr removedKeys); + void reapTombstones(std::shared_ptr removedKeys) override; /** * for internal testing, returns if an entry is a tombstone */ - virtual GfErrType isTombstone(std::shared_ptr& key, - std::shared_ptr& me, - bool& result); + GfErrType isTombstone(std::shared_ptr& key, + std::shared_ptr& me, + bool& result) override; /** * for internal testing, return the number of times any segment @@ -182,6 +182,7 @@ class APACHE_GEODE_EXPORT ConcurrentEntriesMap : public EntriesMap { */ uint32_t totalSegmentRehashes() const; }; // class EntriesMap + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/ConnectionQueue.hpp b/cppcache/src/ConnectionQueue.hpp index 66a6efa8d1..1467a0ece5 100644 --- a/cppcache/src/ConnectionQueue.hpp +++ b/cppcache/src/ConnectionQueue.hpp @@ -68,7 +68,7 @@ class ConnectionQueue { } condition_.notify_one(); } - if (delMp) { + if (delMp && mp) { mp->close(); delete mp; } diff --git a/cppcache/src/CqEventImpl.hpp b/cppcache/src/CqEventImpl.hpp index 
2303ebd358..571f4f8c8d 100644 --- a/cppcache/src/CqEventImpl.hpp +++ b/cppcache/src/CqEventImpl.hpp @@ -45,46 +45,47 @@ class ThinClientBaseDM; class EventId; class CqEventImpl : public CqEvent { public: + CqEventImpl() = delete; CqEventImpl(std::shared_ptr& cQuery, CqOperation baseOp, CqOperation cqOp, std::shared_ptr& key, std::shared_ptr& value, ThinClientBaseDM* tcrdm, std::shared_ptr deltaBytes, std::shared_ptr eventId); + ~CqEventImpl() override = default; - std::shared_ptr getCq() const; + std::shared_ptr getCq() const override; /** * Get the operation on the base region that triggered this event. */ - CqOperation getBaseOperation() const; + CqOperation getBaseOperation() const override; /** * Get the the operation on the query results. Supported operations include * update, create, and destroy. */ - CqOperation getQueryOperation() const; + CqOperation getQueryOperation() const override; /** * Get the key relating to the event. * @return Object key. */ - std::shared_ptr getKey() const; + std::shared_ptr getKey() const override; /** * Get the new value of the modification. * If there is no new value because this is a delete, then * return null. */ - std::shared_ptr getNewValue() const; + std::shared_ptr getNewValue() const override; bool getError(); std::string toString(); - std::shared_ptr getDeltaValue() const; + std::shared_ptr getDeltaValue() const override; private: - CqEventImpl(); std::shared_ptr m_cQuery; CqOperation m_baseOp; CqOperation m_queryOp; diff --git a/cppcache/src/CqQueryImpl.cpp b/cppcache/src/CqQueryImpl.cpp index eec3da418f..2a5ddc094e 100644 --- a/cppcache/src/CqQueryImpl.cpp +++ b/cppcache/src/CqQueryImpl.cpp @@ -123,14 +123,7 @@ void CqQueryImpl::close(bool sendRequestToServer) { // Stat update. 
auto& stats = m_cqService->getCqServiceVsdStats(); - /* - if (isRunning()) { - stats.decNumCqsActive(); - } - else if (isStopped()) { - stats.decNumCqsStopped(); - } - */ + setCqState(CqState::CLOSING); if (sendRequestToServer == true) { try { diff --git a/cppcache/src/CqQueryVsdStats.cpp b/cppcache/src/CqQueryVsdStats.cpp index 54840abbbc..b7bdfe629e 100644 --- a/cppcache/src/CqQueryVsdStats.cpp +++ b/cppcache/src/CqQueryVsdStats.cpp @@ -62,7 +62,7 @@ CqQueryVsdStats::CqQueryVsdStats(StatisticsFactory* factory, m_cqQueryVsdStats->setInt(m_numEventsId, 0); } -CqQueryVsdStats::~CqQueryVsdStats() { +CqQueryVsdStats::~CqQueryVsdStats() noexcept { if (m_cqQueryVsdStats != nullptr) { // Don't Delete, Already closed, Just set nullptr // delete m_CqQueryVsdStats; diff --git a/cppcache/src/CqQueryVsdStats.hpp b/cppcache/src/CqQueryVsdStats.hpp index 715862d42a..3dda95e5b2 100644 --- a/cppcache/src/CqQueryVsdStats.hpp +++ b/cppcache/src/CqQueryVsdStats.hpp @@ -36,14 +36,14 @@ using statistics::StatisticDescriptor; using statistics::Statistics; using statistics::StatisticsType; -class APACHE_GEODE_EXPORT CqQueryVsdStats : public CqStatistics { +class CqQueryVsdStats : public CqStatistics { public: /** hold statistics for a cq. */ CqQueryVsdStats(statistics::StatisticsFactory* factory, const std::string& cqqueryName); /** disable stat collection for this item. 
*/ - virtual ~CqQueryVsdStats(); + virtual ~CqQueryVsdStats() noexcept; void close() { m_cqQueryVsdStats->close(); } @@ -55,16 +55,16 @@ class APACHE_GEODE_EXPORT CqQueryVsdStats : public CqStatistics { inline void incNumEvents() { m_cqQueryVsdStats->incInt(m_numEventsId, 1); } - inline uint32_t numInserts() const { + uint32_t numInserts() const override { return m_cqQueryVsdStats->getInt(m_numInsertsId); } - inline uint32_t numUpdates() const { + uint32_t numUpdates() const override { return m_cqQueryVsdStats->getInt(m_numUpdatesId); } - inline uint32_t numDeletes() const { + uint32_t numDeletes() const override { return m_cqQueryVsdStats->getInt(m_numDeletesId); } - inline uint32_t numEvents() const { + uint32_t numEvents() const override { return m_cqQueryVsdStats->getInt(m_numEventsId); } diff --git a/cppcache/src/CqService.cpp b/cppcache/src/CqService.cpp index 57403e9fcf..d85c2ee5c2 100644 --- a/cppcache/src/CqService.cpp +++ b/cppcache/src/CqService.cpp @@ -26,8 +26,6 @@ #include "CqEventImpl.hpp" #include "CqQueryImpl.hpp" -#include "DistributedSystem.hpp" -#include "ReadWriteLock.hpp" #include "TcrConnectionManager.hpp" #include "ThinClientPoolDM.hpp" #include "util/exception.hpp" @@ -42,6 +40,8 @@ CqService::CqService(ThinClientBaseDM* tccdm, m_statisticsFactory(statisticsFactory), m_notificationSema(1), m_stats(std::make_shared(m_statisticsFactory)) { + assert(nullptr != m_tccdm); + m_running = true; LOGDEBUG("CqService Started"); } @@ -99,7 +99,7 @@ std::shared_ptr CqService::newCq( // Check if the subscription is enabled on the pool auto pool = dynamic_cast(m_tccdm); - if (pool != nullptr && !pool->getSubscriptionEnabled()) { + if (pool && !pool->getSubscriptionEnabled()) { LOGERROR( "Cannot create CQ because subscription is not enabled on the pool."); throw IllegalStateException( @@ -107,7 +107,7 @@ std::shared_ptr CqService::newCq( } // check for durable client - if (isDurable) { + if (isDurable && m_tccdm) { auto&& durableID = 
m_tccdm->getConnectionManager() .getCacheImpl() ->getDistributedSystem() diff --git a/cppcache/src/CqServiceVsdStats.cpp b/cppcache/src/CqServiceVsdStats.cpp index 397434b4bf..6a623a2588 100644 --- a/cppcache/src/CqServiceVsdStats.cpp +++ b/cppcache/src/CqServiceVsdStats.cpp @@ -68,7 +68,7 @@ CqServiceVsdStats::CqServiceVsdStats(StatisticsFactory* factory, m_cqServiceVsdStats->setInt(m_numCqsStoppedId, 0); } -CqServiceVsdStats::~CqServiceVsdStats() { +CqServiceVsdStats::~CqServiceVsdStats() noexcept { if (m_cqServiceVsdStats != nullptr) { // Don't Delete, Already closed, Just set nullptr // delete m_CqServiceVsdStats; diff --git a/cppcache/src/CqServiceVsdStats.hpp b/cppcache/src/CqServiceVsdStats.hpp index 5272ff10c2..3b324c406b 100644 --- a/cppcache/src/CqServiceVsdStats.hpp +++ b/cppcache/src/CqServiceVsdStats.hpp @@ -36,52 +36,47 @@ using statistics::StatisticDescriptor; using statistics::Statistics; using statistics::StatisticsType; -class APACHE_GEODE_EXPORT CqServiceVsdStats : public CqServiceStatistics { +class CqServiceVsdStats : public CqServiceStatistics { public: /** hold statistics for a cq. */ explicit CqServiceVsdStats(statistics::StatisticsFactory* factory, const std::string& cqName = "CqServiceVsdStats"); /** disable stat collection for this item. 
*/ - virtual ~CqServiceVsdStats(); + ~CqServiceVsdStats() noexcept override; void close() { m_cqServiceVsdStats->close(); } - inline void decNumCqsActive() { - m_cqServiceVsdStats->incInt(m_numCqsActiveId, -1); - } inline void incNumCqsActive() const { m_cqServiceVsdStats->incInt(m_numCqsActiveId, 1); } - inline uint32_t numCqsActive() const { + inline uint32_t numCqsActive() const override { return m_cqServiceVsdStats->getInt(m_numCqsActiveId); } inline void incNumCqsCreated() { m_cqServiceVsdStats->incInt(m_numCqsCreatedId, 1); } - inline uint32_t numCqsCreated() const { + inline uint32_t numCqsCreated() const override { return m_cqServiceVsdStats->getInt(m_numCqsCreatedId); } - inline uint32_t numCqsOnClient() const { + inline uint32_t numCqsOnClient() const override { return m_cqServiceVsdStats->getInt(m_numCqsOnClientId); } inline void incNumCqsClosed() { m_cqServiceVsdStats->incInt(m_numCqsClosedId, 1); } - inline uint32_t numCqsClosed() const { + inline uint32_t numCqsClosed() const override { return m_cqServiceVsdStats->getInt(m_numCqsClosedId); } inline void incNumCqsStopped() { m_cqServiceVsdStats->incInt(m_numCqsStoppedId, 1); } - inline void decNumCqsStopped() { - m_cqServiceVsdStats->incInt(m_numCqsStoppedId, -1); - } - inline uint32_t numCqsStopped() const { + + inline uint32_t numCqsStopped() const override { return m_cqServiceVsdStats->getInt(m_numCqsStoppedId); } @@ -93,10 +88,6 @@ class APACHE_GEODE_EXPORT CqServiceVsdStats : public CqServiceStatistics { m_cqServiceVsdStats->setInt(m_numCqsOnClientId, value); } - inline void setNumCqsClosed(uint32_t value) { - m_cqServiceVsdStats->setInt(m_numCqsClosedId, value); - } - inline void setNumCqsStopped(uint32_t value) { m_cqServiceVsdStats->setInt(m_numCqsStoppedId, value); } diff --git a/cppcache/src/EntryExpiryHandler.hpp b/cppcache/src/EntryExpiryHandler.hpp index 5af8a866d0..2cb9c19e28 100644 --- a/cppcache/src/EntryExpiryHandler.hpp +++ b/cppcache/src/EntryExpiryHandler.hpp @@ -1,8 +1,3 @@ 
-#pragma once - -#ifndef GEODE_ENTRYEXPIRYHANDLER_H_ -#define GEODE_ENTRYEXPIRYHANDLER_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. */ +#pragma once + +#ifndef GEODE_ENTRYEXPIRYHANDLER_H_ +#define GEODE_ENTRYEXPIRYHANDLER_H_ + #include #include #include @@ -28,13 +28,10 @@ #include "ExpMapEntry.hpp" #include "RegionInternal.hpp" -/** - * @file - */ - namespace apache { namespace geode { namespace client { + /** * @class EntryExpiryTask EntryExpiryTask.hpp * @@ -44,11 +41,8 @@ namespace client { * TODO: TODO: cleanup region entry nodes and handlers from expiry task * manager when region is destroyed */ -class APACHE_GEODE_EXPORT EntryExpiryHandler : public ACE_Event_Handler { +class EntryExpiryHandler : public ACE_Event_Handler { public: - /** - * Constructor - */ EntryExpiryHandler(std::shared_ptr& rptr, std::shared_ptr& entryPtr, ExpirationAction action, std::chrono::seconds duration); @@ -56,11 +50,12 @@ class APACHE_GEODE_EXPORT EntryExpiryHandler : public ACE_Event_Handler { /** This task object will be registered with the Timer Queue. * When the timer expires the handle_timeout is invoked. */ - int handle_timeout(const ACE_Time_Value& current_time, const void* arg); + int handle_timeout(const ACE_Time_Value& current_time, + const void* arg) override; /** * This is called when the task object needs to be cleaned up.. 
*/ - int handle_close(ACE_HANDLE handle, ACE_Reactor_Mask close_mask); + int handle_close(ACE_HANDLE handle, ACE_Reactor_Mask close_mask) override; private: // The region which contains the entry @@ -75,6 +70,7 @@ class APACHE_GEODE_EXPORT EntryExpiryHandler : public ACE_Event_Handler { // perform the actual expiration action void DoTheExpirationAction(const std::shared_ptr& key); }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/ExpMapEntry.hpp b/cppcache/src/ExpMapEntry.hpp index f58a33ffb9..d532089d42 100644 --- a/cppcache/src/ExpMapEntry.hpp +++ b/cppcache/src/ExpMapEntry.hpp @@ -32,14 +32,16 @@ namespace client { * @brief Hold region mapped entry value. * This subclass adds expiration times. */ -class APACHE_GEODE_EXPORT ExpMapEntry : public MapEntryImpl, - public ExpEntryProperties { +class ExpMapEntry : public MapEntryImpl, public ExpEntryProperties { public: - virtual ~ExpMapEntry() {} + ~ExpMapEntry() noexcept override = default; - virtual ExpEntryProperties& getExpProperties() { return *this; } + ExpMapEntry(const ExpMapEntry&) = delete; + ExpMapEntry& operator=(const ExpMapEntry&) = delete; - virtual void cleanup(const CacheEventFlags eventFlags) { + ExpEntryProperties& getExpProperties() override { return *this; } + + virtual void cleanup(const CacheEventFlags eventFlags) override { if (!eventFlags.isExpiration()) { cancelExpiryTaskId(m_key); } @@ -52,15 +54,9 @@ class APACHE_GEODE_EXPORT ExpMapEntry : public MapEntryImpl, inline ExpMapEntry(ExpiryTaskManager* expiryTaskManager, const std::shared_ptr& key) : MapEntryImpl(key), ExpEntryProperties(expiryTaskManager) {} - - private: - // disabled - ExpMapEntry(const ExpMapEntry&); - ExpMapEntry& operator=(const ExpMapEntry&); }; -class APACHE_GEODE_EXPORT VersionedExpMapEntry : public ExpMapEntry, - public VersionStamp { +class VersionedExpMapEntry : public ExpMapEntry, public VersionStamp { public: inline VersionedExpMapEntry(ExpiryTaskManager* 
expiryTaskManager, const std::shared_ptr& key) @@ -68,25 +64,23 @@ class APACHE_GEODE_EXPORT VersionedExpMapEntry : public ExpMapEntry, inline explicit VersionedExpMapEntry(bool) : ExpMapEntry(true) {} - virtual ~VersionedExpMapEntry() {} + ~VersionedExpMapEntry() noexcept override {} - virtual VersionStamp& getVersionStamp() { return *this; } + VersionedExpMapEntry(const VersionedExpMapEntry&) = delete; + VersionedExpMapEntry& operator=(const VersionedExpMapEntry&) = delete; - private: - // disabled - VersionedExpMapEntry(const VersionedExpMapEntry&); - VersionedExpMapEntry& operator=(const VersionedExpMapEntry&); + VersionStamp& getVersionStamp() override { return *this; } }; -class APACHE_GEODE_EXPORT ExpEntryFactory : public EntryFactory { +class ExpEntryFactory : public EntryFactory { public: using EntryFactory::EntryFactory; - virtual ~ExpEntryFactory() {} + ~ExpEntryFactory() noexcept override {} - virtual void newMapEntry(ExpiryTaskManager* expiryTaskManager, - const std::shared_ptr& key, - std::shared_ptr& result) const; + void newMapEntry(ExpiryTaskManager* expiryTaskManager, + const std::shared_ptr& key, + std::shared_ptr& result) const override; }; } // namespace client } // namespace geode diff --git a/cppcache/src/ExpiryTaskManager.cpp b/cppcache/src/ExpiryTaskManager.cpp index 1e1cb5960c..bced26e201 100644 --- a/cppcache/src/ExpiryTaskManager.cpp +++ b/cppcache/src/ExpiryTaskManager.cpp @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + #include "ExpiryTaskManager.hpp" #include "DistributedSystem.hpp" @@ -37,13 +38,16 @@ namespace client { const char* ExpiryTaskManager::NC_ETM_Thread = "NC ETM Thread"; ExpiryTaskManager::ExpiryTaskManager() : m_reactorEventLoopRunning(false) { + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall): ACE auto timer = new GF_Timer_Heap_ImmediateReset(); m_timer = std::unique_ptr(timer); #if defined(_WIN32) m_reactor = new ACE_Reactor(new ACE_WFMO_Reactor(nullptr, m_timer.get()), 1); #elif defined(WITH_ACE_Select_Reactor) - m_reactor = - new ACE_Reactor(new ACE_Select_Reactor(nullptr, m_timer.get()), 1); + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall): ACE + auto aceSelectReactor = new ACE_Select_Reactor(nullptr, m_timer.get()); + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall): ACE + m_reactor = new ACE_Reactor(aceSelectReactor, 1); #else m_reactor = new ACE_Reactor(new ACE_Dev_Poll_Reactor(nullptr, m_timer.get()) 1); @@ -95,6 +99,8 @@ ExpiryTaskManager::~ExpiryTaskManager() { delete m_reactor; m_reactor = nullptr; + + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall): ACE } } // namespace client diff --git a/cppcache/src/ExpiryTaskManager.hpp b/cppcache/src/ExpiryTaskManager.hpp index ca99032bc6..bbce064afa 100644 --- a/cppcache/src/ExpiryTaskManager.hpp +++ b/cppcache/src/ExpiryTaskManager.hpp @@ -46,7 +46,7 @@ namespace client { * This class starts a reactor's event loop for taking care of expiry * tasks. The scheduling of event also happens through this manager. 
*/ -class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { +class ExpiryTaskManager : public ACE_Task_Base { public: typedef decltype(std::declval().schedule_timer( nullptr, nullptr, std::declval())) id_type; @@ -85,7 +85,7 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { class GF_Timer_Heap_ImmediateReset_T : public ACE_Timer_Heap_T { public: - virtual int expire_single(ACE_Command_Base& pre_dispatch_command) { + int expire_single(ACE_Command_Base& pre_dispatch_command) override { ACE_TRACE("GF_Timer_Heap_ImmediateReset_T::expire_single"); ACE_Timer_Node_Dispatch_Info_T info; ACE_Time_Value cur_time; @@ -93,7 +93,7 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { // Create a scope for the lock ... { - ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)); + ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)) if (this->is_empty()) return 0; @@ -121,7 +121,7 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { // Create a scope for the lock ... { - ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)); + ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)) // Reschedule after doing the upcall in expire method // to let updated expiry interval, if any, take effect correctly @@ -147,13 +147,13 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { return 1; } - virtual int expire() { + int expire() override { return ACE_Timer_Queue_T::expire(); } - virtual int expire(const ACE_Time_Value& cur_time) { + int expire(const ACE_Time_Value& cur_time) override { ACE_TRACE("GF_Timer_Heap_ImmediateReset_T::expire"); - ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)); + ACE_MT(ACE_GUARD_RETURN(ACE_LOCK, ace_mon, this->mutex_, -1)) // Keep looping while there are timers remaining and the earliest // timer is <= the passed in to the method. 
@@ -230,7 +230,7 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { * Destructor. Stops the reactors event loop if it is not running * and then exits. */ - ~ExpiryTaskManager(); + ~ExpiryTaskManager() override; /** * For scheduling a task for expiration. @@ -285,7 +285,7 @@ class APACHE_GEODE_EXPORT ExpiryTaskManager : public ACE_Task_Base { * is kept running unless explicitly stopped or when this object * goes out of scope. */ - int svc(); + int svc() override; /** * For explicitly stopping the reactor's event loop. diff --git a/cppcache/src/FunctionServiceImpl.cpp b/cppcache/src/FunctionServiceImpl.cpp deleted file mode 100644 index 1f91209412..0000000000 --- a/cppcache/src/FunctionServiceImpl.cpp +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include "FunctionServiceImpl.hpp" - -#include - -#include "ExecutionImpl.hpp" - -namespace apache { -namespace geode { -namespace client { - -FunctionServiceImpl::FunctionServiceImpl(AuthenticatedView* authenticatedView) { - m_authenticatedView = authenticatedView; -} - -std::shared_ptr FunctionServiceImpl::getFunctionService( - AuthenticatedView* authenticatedView) { - return std::make_shared(authenticatedView); -} - -} // namespace client -} // namespace geode -} // namespace apache diff --git a/cppcache/src/FunctionServiceImpl.hpp b/cppcache/src/FunctionServiceImpl.hpp deleted file mode 100644 index 8c6e51aa71..0000000000 --- a/cppcache/src/FunctionServiceImpl.hpp +++ /dev/null @@ -1,60 +0,0 @@ -#pragma once - -#ifndef GEODE_FUNCTIONSERVICEIMPL_H_ -#define GEODE_FUNCTIONSERVICEIMPL_H_ - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include -#include -#include -/** - * @file - */ - -namespace apache { -namespace geode { -namespace client { -/** - * @class FunctionService FunctionService.hpp - * entry point for function execution - * @see Execution - */ - -class APACHE_GEODE_EXPORT FunctionServiceImpl : public FunctionService { - public: - explicit FunctionServiceImpl(AuthenticatedView* authenticatedView); - - virtual ~FunctionServiceImpl() {} - - private: - explicit FunctionServiceImpl(const FunctionService&); - FunctionServiceImpl& operator=(const FunctionService&); - - static std::shared_ptr getFunctionService( - AuthenticatedView* authenticatedView); - - AuthenticatedView* m_authenticatedView; - - friend class AuthenticatedView; -}; -} // namespace client -} // namespace geode -} // namespace apache - -#endif // GEODE_FUNCTIONSERVICEIMPL_H_ diff --git a/cppcache/src/LRUAction.hpp b/cppcache/src/LRUAction.hpp index 24142c4216..58434b2c83 100644 --- a/cppcache/src/LRUAction.hpp +++ b/cppcache/src/LRUAction.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_LRUACTION_H_ -#define GEODE_LRUACTION_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. */ +#pragma once + +#ifndef GEODE_LRUACTION_H_ +#define GEODE_LRUACTION_H_ + #include #include #include @@ -37,7 +37,7 @@ namespace client { * @brief abstract behavior for different eviction actions. 
*/ class LRUEntriesMap; -class APACHE_GEODE_EXPORT LRUAction { +class LRUAction { protected: bool m_invalidates; bool m_destroys; @@ -77,7 +77,7 @@ class APACHE_GEODE_EXPORT LRUAction { RegionInternal* regionPtr, LRUEntriesMap* entriesMapPtr); - virtual ~LRUAction() {} + virtual ~LRUAction() noexcept = default; virtual bool evict(const std::shared_ptr& mePtr) = 0; @@ -95,7 +95,7 @@ class APACHE_GEODE_EXPORT LRUAction { /** * @brief LRUAction for destroy (distributed) */ -class APACHE_GEODE_EXPORT LRUDestroyAction : public virtual LRUAction { +class LRUDestroyAction : public virtual LRUAction { private: RegionInternal* m_regionPtr; @@ -106,9 +106,9 @@ class APACHE_GEODE_EXPORT LRUDestroyAction : public virtual LRUAction { } public: - virtual ~LRUDestroyAction() = default; + ~LRUDestroyAction() noexcept override = default; - virtual bool evict(const std::shared_ptr& mePtr) { + bool evict(const std::shared_ptr& mePtr) override { std::shared_ptr keyPtr; mePtr->getKeyI(keyPtr); std::shared_ptr versionTag; @@ -124,7 +124,7 @@ class APACHE_GEODE_EXPORT LRUDestroyAction : public virtual LRUAction { return (err == GF_NOERR); } - virtual LRUAction::Action getType() { return LRUAction::DESTROY; } + LRUAction::Action getType() override { return LRUAction::DESTROY; } friend class LRUAction; }; @@ -132,7 +132,7 @@ class APACHE_GEODE_EXPORT LRUDestroyAction : public virtual LRUAction { /** * @brief LRUAction for invalidate. 
*/ -class APACHE_GEODE_EXPORT LRULocalInvalidateAction : public virtual LRUAction { +class LRULocalInvalidateAction : public virtual LRUAction { private: RegionInternal* m_regionPtr; @@ -142,11 +142,11 @@ class APACHE_GEODE_EXPORT LRULocalInvalidateAction : public virtual LRUAction { } public: - virtual ~LRULocalInvalidateAction() = default; + ~LRULocalInvalidateAction() noexcept override = default; - virtual bool evict(const std::shared_ptr& mePtr); + bool evict(const std::shared_ptr& mePtr) override; - virtual LRUAction::Action getType() { return LRUAction::LOCAL_INVALIDATE; } + LRUAction::Action getType() override { return LRUAction::LOCAL_INVALIDATE; } friend class LRUAction; }; @@ -154,7 +154,7 @@ class APACHE_GEODE_EXPORT LRULocalInvalidateAction : public virtual LRUAction { /** * @brief LRUAction for invalidate. */ -class APACHE_GEODE_EXPORT LRUOverFlowToDiskAction : public virtual LRUAction { +class LRUOverFlowToDiskAction : public virtual LRUAction { private: RegionInternal* m_regionPtr; LRUEntriesMap* m_entriesMapPtr; @@ -166,11 +166,11 @@ class APACHE_GEODE_EXPORT LRUOverFlowToDiskAction : public virtual LRUAction { } public: - virtual ~LRUOverFlowToDiskAction() {} + ~LRUOverFlowToDiskAction() noexcept override {} - virtual bool evict(const std::shared_ptr& mePtr); + bool evict(const std::shared_ptr& mePtr) override; - virtual LRUAction::Action getType() { return LRUAction::OVERFLOW_TO_DISK; } + LRUAction::Action getType() override { return LRUAction::OVERFLOW_TO_DISK; } friend class LRUAction; }; diff --git a/cppcache/src/LRUEntriesMap.cpp b/cppcache/src/LRUEntriesMap.cpp index 6b25985d69..2e9413f028 100644 --- a/cppcache/src/LRUEntriesMap.cpp +++ b/cppcache/src/LRUEntriesMap.cpp @@ -33,16 +33,16 @@ namespace client { /** * @brief LRUAction for testing map outside of a region.... 
*/ -class APACHE_GEODE_EXPORT TestMapAction : public virtual LRUAction { +class TestMapAction : public virtual LRUAction { private: EntriesMap* m_eMap; public: explicit TestMapAction(EntriesMap* eMap) : m_eMap(eMap) { m_destroys = true; } - virtual ~TestMapAction() {} + ~TestMapAction() noexcept override = default; - virtual bool evict(const std::shared_ptr& mePtr) { + bool evict(const std::shared_ptr& mePtr) override { std::shared_ptr keyPtr; mePtr->getKeyI(keyPtr); /** @TODO try catch.... return true or false. */ @@ -52,7 +52,7 @@ class APACHE_GEODE_EXPORT TestMapAction : public virtual LRUAction { return (m_eMap->remove(keyPtr, cPtr, me, 0, versionTag, false) == GF_NOERR); } - virtual LRUAction::Action getType() { return LRUAction::LOCAL_DESTROY; } + LRUAction::Action getType() override { return LRUAction::LOCAL_DESTROY; } friend class LRUAction; }; @@ -104,7 +104,7 @@ void LRUEntriesMap::clear() { ConcurrentEntriesMap::clear(); } -LRUEntriesMap::~LRUEntriesMap() { delete m_action; } +LRUEntriesMap::~LRUEntriesMap() noexcept { delete m_action; } /** * @brief put an item in the map... 
if it is a new entry, then the LRU may @@ -201,7 +201,6 @@ GfErrType LRUEntriesMap::invalidate(const std::shared_ptr& key, std::shared_ptr& me, std::shared_ptr& oldValue, std::shared_ptr versionTag) { - int64_t newSize = 0; MapSegment* segmentRPtr = segmentFor(key); bool isTokenAdded = false; GfErrType err = @@ -236,7 +235,7 @@ GfErrType LRUEntriesMap::invalidate(const std::shared_ptr& key, if (!isOldValueToken) { --m_validEntries; lru_queue_.remove(me); - newSize = CacheableToken::invalid()->objectSize(); + auto newSize = CacheableToken::invalid()->objectSize(); if (oldValue != nullptr) { newSize -= oldValue->objectSize(); } else { diff --git a/cppcache/src/LRUEntriesMap.hpp b/cppcache/src/LRUEntriesMap.hpp index 2c7f0e916f..f04c0a7fd0 100644 --- a/cppcache/src/LRUEntriesMap.hpp +++ b/cppcache/src/LRUEntriesMap.hpp @@ -35,27 +35,13 @@ namespace apache { namespace geode { namespace client { + class EvictionController; /** * @brief Concurrent entries map with LRU behavior. * Not designed for subclassing... */ - -/* adongre - * CID 28728: Other violation (MISSING_COPY) - * Class "apache::geode::client::LRUEntriesMap" owns resources that are managed - * in its - * constructor and destructor but has no user-written copy constructor. - * - * FIX : Make the class non copyable - * - * CID 28714: Other violation (MISSING_ASSIGN) - * Class "apache::geode::client::LRUEntriesMap" owns resources that are managed - * in - * its constructor and destructor but has no user-written assignment operator. 
- * Fix : Make the class Non Assinable - */ class APACHE_GEODE_EXPORT LRUEntriesMap : public ConcurrentEntriesMap { protected: using spinlock_mutex = ::apache::geode::util::concurrent::spinlock_mutex; @@ -80,32 +66,31 @@ class APACHE_GEODE_EXPORT LRUEntriesMap : public ConcurrentEntriesMap { const uint32_t limit, bool concurrencyChecksEnabled, const uint8_t concurrency = 16, bool heapLRUEnabled = false); - virtual ~LRUEntriesMap(); - - virtual GfErrType put(const std::shared_ptr& key, - const std::shared_ptr& newValue, - std::shared_ptr& me, - std::shared_ptr& oldValue, int updateCount, - int destroyTracker, - std::shared_ptr versionTag, - bool& isUpdate = EntriesMap::boolVal, - DataInput* delta = nullptr); - virtual GfErrType invalidate(const std::shared_ptr& key, - std::shared_ptr& me, - std::shared_ptr& oldValue, - std::shared_ptr versionTag); - virtual GfErrType create(const std::shared_ptr& key, - const std::shared_ptr& newValue, - std::shared_ptr& me, - std::shared_ptr& oldValue, - int updateCount, int destroyTracker, - std::shared_ptr versionTag); - virtual bool get(const std::shared_ptr& key, - std::shared_ptr& value, - std::shared_ptr& me); - virtual std::shared_ptr getFromDisk( + ~LRUEntriesMap() noexcept override; + + GfErrType put(const std::shared_ptr& key, + const std::shared_ptr& newValue, + std::shared_ptr& me, + std::shared_ptr& oldValue, int updateCount, + int destroyTracker, std::shared_ptr versionTag, + bool& isUpdate = EntriesMap::boolVal, + DataInput* delta = nullptr) override; + GfErrType invalidate(const std::shared_ptr& key, + std::shared_ptr& me, + std::shared_ptr& oldValue, + std::shared_ptr versionTag) override; + GfErrType create(const std::shared_ptr& key, + const std::shared_ptr& newValue, + std::shared_ptr& me, + std::shared_ptr& oldValue, int updateCount, + int destroyTracker, + std::shared_ptr versionTag) override; + bool get(const std::shared_ptr& key, + std::shared_ptr& value, + std::shared_ptr& me) override; + std::shared_ptr 
getFromDisk( const std::shared_ptr& key, - std::shared_ptr& me) const; + std::shared_ptr& me) const override; GfErrType processLRU(); void processLRU(int32_t numEntriesToEvict); GfErrType evictionHelper(); @@ -115,16 +100,12 @@ class APACHE_GEODE_EXPORT LRUEntriesMap : public ConcurrentEntriesMap { m_pmPtr = pmPtr; } - /** - * @brief remove an entry, marking it evicted for LRUList maintainance. - */ - virtual GfErrType remove(const std::shared_ptr& key, - std::shared_ptr& result, - std::shared_ptr& me, int updateCount, - std::shared_ptr versionTag, - bool afterRemote); - - virtual void close(); + GfErrType remove(const std::shared_ptr& key, + std::shared_ptr& result, + std::shared_ptr& me, int updateCount, + std::shared_ptr versionTag, + bool afterRemote) override; + void close() override; inline bool mustEvict() const { if (m_action == nullptr) { @@ -144,9 +125,10 @@ class APACHE_GEODE_EXPORT LRUEntriesMap : public ConcurrentEntriesMap { inline void adjustLimit(uint32_t limit) { m_limit = limit; } - virtual void clear(); + void clear() override; }; // class LRUEntriesMap + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/LRUExpMapEntry.hpp b/cppcache/src/LRUExpMapEntry.hpp index d462bc72e3..ebf5685fa6 100644 --- a/cppcache/src/LRUExpMapEntry.hpp +++ b/cppcache/src/LRUExpMapEntry.hpp @@ -32,17 +32,20 @@ namespace client { /** * @brief Hold region mapped entry value and lru information. 
*/ -class APACHE_GEODE_EXPORT LRUExpMapEntry : public MapEntryImpl, - public LRUEntryProperties, - public ExpEntryProperties { +class LRUExpMapEntry : public MapEntryImpl, + public LRUEntryProperties, + public ExpEntryProperties { public: - virtual ~LRUExpMapEntry() {} + LRUExpMapEntry(const LRUExpMapEntry&) = delete; + LRUExpMapEntry& operator=(const LRUExpMapEntry&) = delete; - virtual LRUEntryProperties& getLRUProperties() { return *this; } + ~LRUExpMapEntry() noexcept override = default; - virtual ExpEntryProperties& getExpProperties() { return *this; } + LRUEntryProperties& getLRUProperties() override { return *this; } - virtual void cleanup(const CacheEventFlags eventFlags) { + ExpEntryProperties& getExpProperties() override { return *this; } + + void cleanup(const CacheEventFlags eventFlags) override { if (!eventFlags.isExpiration()) { cancelExpiryTaskId(m_key); } @@ -57,19 +60,16 @@ class APACHE_GEODE_EXPORT LRUExpMapEntry : public MapEntryImpl, inline LRUExpMapEntry(ExpiryTaskManager* expiryTaskManager, const std::shared_ptr& key) : MapEntryImpl(key), ExpEntryProperties(expiryTaskManager) {} - - private: - // disabled - LRUExpMapEntry(const LRUExpMapEntry&); - LRUExpMapEntry& operator=(const LRUExpMapEntry&); }; -class APACHE_GEODE_EXPORT VersionedLRUExpMapEntry : public LRUExpMapEntry, - public VersionStamp { +class VersionedLRUExpMapEntry : public LRUExpMapEntry, public VersionStamp { public: - virtual ~VersionedLRUExpMapEntry() {} + VersionedLRUExpMapEntry(const VersionedLRUExpMapEntry&) = delete; + VersionedLRUExpMapEntry& operator=(const VersionedLRUExpMapEntry&) = delete; - virtual VersionStamp& getVersionStamp() { return *this; } + ~VersionedLRUExpMapEntry() noexcept override = default; + + VersionStamp& getVersionStamp() override { return *this; } protected: inline explicit VersionedLRUExpMapEntry(bool) : LRUExpMapEntry(true) {} @@ -77,22 +77,17 @@ class APACHE_GEODE_EXPORT VersionedLRUExpMapEntry : public LRUExpMapEntry, inline 
VersionedLRUExpMapEntry(ExpiryTaskManager* expiryTaskManager, const std::shared_ptr& key) : LRUExpMapEntry(expiryTaskManager, key) {} - - private: - // disabled - VersionedLRUExpMapEntry(const VersionedLRUExpMapEntry&); - VersionedLRUExpMapEntry& operator=(const VersionedLRUExpMapEntry&); }; -class APACHE_GEODE_EXPORT LRUExpEntryFactory : public EntryFactory { +class LRUExpEntryFactory : public EntryFactory { public: using EntryFactory::EntryFactory; - virtual ~LRUExpEntryFactory() {} + ~LRUExpEntryFactory() noexcept override = default; - virtual void newMapEntry(ExpiryTaskManager* expiryTaskManager, - const std::shared_ptr& key, - std::shared_ptr& result) const; + void newMapEntry(ExpiryTaskManager* expiryTaskManager, + const std::shared_ptr& key, + std::shared_ptr& result) const override; }; } // namespace client } // namespace geode diff --git a/cppcache/src/LRULocalDestroyAction.hpp b/cppcache/src/LRULocalDestroyAction.hpp index 4ce209d825..d42f2364ca 100644 --- a/cppcache/src/LRULocalDestroyAction.hpp +++ b/cppcache/src/LRULocalDestroyAction.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_LRULOCALDESTROYACTION_H_ -#define GEODE_LRULOCALDESTROYACTION_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. 
*/ +#pragma once + +#ifndef GEODE_LRULOCALDESTROYACTION_H_ +#define GEODE_LRULOCALDESTROYACTION_H_ + #include #include @@ -46,13 +46,14 @@ class APACHE_GEODE_EXPORT LRULocalDestroyAction : public virtual LRUAction { } public: - virtual ~LRULocalDestroyAction() {} + ~LRULocalDestroyAction() noexcept override = default; - virtual bool evict(const std::shared_ptr& mePtr); + bool evict(const std::shared_ptr& mePtr) override; - virtual LRUAction::Action getType() { return LRUAction::LOCAL_DESTROY; } + LRUAction::Action getType() override { return LRUAction::LOCAL_DESTROY; } friend class LRUAction; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/LRUMapEntry.hpp b/cppcache/src/LRUMapEntry.hpp index bd16b56925..a9bc1f6659 100644 --- a/cppcache/src/LRUMapEntry.hpp +++ b/cppcache/src/LRUMapEntry.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_LRUMAPENTRY_H_ -#define GEODE_LRUMAPENTRY_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. */ +#pragma once + +#ifndef GEODE_LRUMAPENTRY_H_ +#define GEODE_LRUMAPENTRY_H_ + #include #include @@ -30,6 +30,7 @@ namespace apache { namespace geode { namespace client { + /** * This template class adds the recently used, eviction bits and persistence * info to the MapEntry class. 
The earlier design looked like below: @@ -60,14 +61,16 @@ namespace client { * * */ -class APACHE_GEODE_EXPORT LRUMapEntry : public MapEntryImpl, - public LRUEntryProperties { +class LRUMapEntry : public MapEntryImpl, public LRUEntryProperties { public: - virtual ~LRUMapEntry() {} + LRUMapEntry(const LRUMapEntry&) = delete; + LRUMapEntry& operator=(const LRUMapEntry&) = delete; + + ~LRUMapEntry() noexcept override = default; - virtual LRUEntryProperties& getLRUProperties() { return *this; } + LRUEntryProperties& getLRUProperties() override { return *this; } - virtual void cleanup(const CacheEventFlags eventFlags) { + void cleanup(const CacheEventFlags eventFlags) override { if (!eventFlags.isEviction()) { // TODO: this needs an implementation of doubly-linked list // to remove from the list; also add this to LRUExpMapEntry since MI @@ -81,42 +84,35 @@ class APACHE_GEODE_EXPORT LRUMapEntry : public MapEntryImpl, inline explicit LRUMapEntry(const std::shared_ptr& key) : MapEntryImpl(key) {} - - private: - // disabled - LRUMapEntry(const LRUMapEntry&); - LRUMapEntry& operator=(const LRUMapEntry&); }; -class APACHE_GEODE_EXPORT VersionedLRUMapEntry : public LRUMapEntry, - public VersionStamp { +class VersionedLRUMapEntry : public LRUMapEntry, public VersionStamp { public: - virtual ~VersionedLRUMapEntry() {} + VersionedLRUMapEntry(const VersionedLRUMapEntry&) = delete; + VersionedLRUMapEntry& operator=(const VersionedLRUMapEntry&) = delete; + + ~VersionedLRUMapEntry() noexcept override = default; - virtual VersionStamp& getVersionStamp() { return *this; } + VersionStamp& getVersionStamp() override { return *this; } protected: inline explicit VersionedLRUMapEntry(bool) : LRUMapEntry(true) {} inline explicit VersionedLRUMapEntry(const std::shared_ptr& key) : LRUMapEntry(key) {} - - private: - // disabled - VersionedLRUMapEntry(const VersionedLRUMapEntry&); - VersionedLRUMapEntry& operator=(const VersionedLRUMapEntry&); }; -class APACHE_GEODE_EXPORT LRUEntryFactory : 
public EntryFactory { +class LRUEntryFactory : public EntryFactory { public: using EntryFactory::EntryFactory; - virtual ~LRUEntryFactory() {} + ~LRUEntryFactory() noexcept override = default; - virtual void newMapEntry(ExpiryTaskManager* expiryTaskManager, - const std::shared_ptr& key, - std::shared_ptr& result) const; + void newMapEntry(ExpiryTaskManager* expiryTaskManager, + const std::shared_ptr& key, + std::shared_ptr& result) const override; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/LocalRegion.cpp b/cppcache/src/LocalRegion.cpp index 8ce50d89e0..13267ebb54 100644 --- a/cppcache/src/LocalRegion.cpp +++ b/cppcache/src/LocalRegion.cpp @@ -17,7 +17,6 @@ #include "LocalRegion.hpp" -#include #include #include @@ -26,6 +25,7 @@ #include "CacheImpl.hpp" #include "CacheRegionHelper.hpp" #include "CacheableToken.hpp" +#include "EntriesMapFactory.hpp" #include "EntryExpiryHandler.hpp" #include "ExpiryTaskManager.hpp" #include "LRUEntriesMap.hpp" @@ -93,7 +93,7 @@ LocalRegion::LocalRegion(const std::string& name, CacheImpl* cacheImpl, m_regionStats = new RegionStats( cacheImpl->getStatisticsManager().getStatisticsFactory(), m_fullPath); - auto p = cacheImpl->getPoolManager().find(getAttributes().getPoolName()); + auto p = cacheImpl->getPoolManager().find(m_regionAttributes.getPoolName()); setPool(p); } @@ -733,6 +733,8 @@ void LocalRegion::registerEntryExpiryTask( LocalRegion::~LocalRegion() noexcept { TryWriteGuard guard(m_rwLock, m_destroyPending); if (!m_destroyPending) { + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) release(false); } m_listener = nullptr; diff --git a/cppcache/src/LocalRegion.hpp b/cppcache/src/LocalRegion.hpp index 1076d0acf3..057276b0ad 100644 --- a/cppcache/src/LocalRegion.hpp +++ b/cppcache/src/LocalRegion.hpp @@ -33,19 +33,15 @@ #include #include #include -#include #include #include #include #include -#include "CacheableToken.hpp" -#include 
"EntriesMapFactory.hpp" +#include "EntriesMap.hpp" #include "EventType.hpp" -#include "ExpMapEntry.hpp" #include "RegionInternal.hpp" #include "RegionStats.hpp" -#include "SerializationRegistry.hpp" #include "TSSTXStateWrapper.hpp" #include "TombstoneList.hpp" #include "util/synchronized_map.hpp" @@ -55,21 +51,25 @@ namespace geode { namespace client { #ifndef CHECK_DESTROY_PENDING -#define CHECK_DESTROY_PENDING(lock, function) \ - lock checkGuard(m_rwLock, m_destroyPending); \ - if (m_destroyPending) { \ - std::string err_msg = #function; \ - err_msg += ": region " + m_fullPath + " destroyed"; \ - throw RegionDestroyedException(err_msg.c_str()); \ - } +#define CHECK_DESTROY_PENDING(lock, function) \ + lock checkGuard(m_rwLock, m_destroyPending); \ + do { \ + if (m_destroyPending) { \ + std::string err_msg = #function; \ + err_msg += ": region " + m_fullPath + " destroyed"; \ + throw RegionDestroyedException(err_msg.c_str()); \ + } \ + } while (0) #endif #ifndef CHECK_DESTROY_PENDING_NOTHROW -#define CHECK_DESTROY_PENDING_NOTHROW(lock) \ - lock checkGuard(m_rwLock, m_destroyPending); \ - if (m_destroyPending) { \ - return GF_CACHE_REGION_DESTROYED_EXCEPTION; \ - } +#define CHECK_DESTROY_PENDING_NOTHROW(lock) \ + lock checkGuard(m_rwLock, m_destroyPending); \ + do { \ + if (m_destroyPending) { \ + return GF_CACHE_REGION_DESTROYED_EXCEPTION; \ + } \ + } while (0) #endif class PutActions; diff --git a/cppcache/src/MapEntry.hpp b/cppcache/src/MapEntry.hpp index 14bec140f0..76b0690acb 100644 --- a/cppcache/src/MapEntry.hpp +++ b/cppcache/src/MapEntry.hpp @@ -48,8 +48,10 @@ class CacheImpl; * @brief This class encapsulates expiration specific properties for * a MapEntry. 
*/ -class APACHE_GEODE_EXPORT ExpEntryProperties { +class ExpEntryProperties { public: + ~ExpEntryProperties() noexcept = default; + typedef std::chrono::system_clock::time_point time_point; inline explicit ExpEntryProperties(ExpiryTaskManager* expiryTaskManager) @@ -122,7 +124,7 @@ class APACHE_GEODE_EXPORT ExpEntryProperties { */ class APACHE_GEODE_EXPORT MapEntry { public: - virtual ~MapEntry() {} + virtual ~MapEntry() noexcept = default; virtual void getKey(std::shared_ptr& result) const = 0; virtual void getValue(std::shared_ptr& result) const = 0; @@ -191,7 +193,7 @@ class APACHE_GEODE_EXPORT MapEntry { virtual void cleanup(const CacheEventFlags eventFlags) = 0; protected: - inline MapEntry() {} + inline MapEntry() = default; inline explicit MapEntry(bool) {} }; @@ -203,7 +205,7 @@ class APACHE_GEODE_EXPORT MapEntry { class MapEntryImpl : public MapEntry, public std::enable_shared_from_this { public: - ~MapEntryImpl() override = default; + ~MapEntryImpl() noexcept override = default; MapEntryImpl(const MapEntryImpl&) = delete; MapEntryImpl& operator=(const MapEntryImpl&) = delete; @@ -270,12 +272,14 @@ class MapEntryImpl : public MapEntry, std::shared_ptr m_key; }; -class APACHE_GEODE_EXPORT VersionedMapEntryImpl : public MapEntryImpl, - public VersionStamp { +class VersionedMapEntryImpl : public MapEntryImpl, public VersionStamp { public: - virtual ~VersionedMapEntryImpl() {} + ~VersionedMapEntryImpl() noexcept override = default; + + VersionedMapEntryImpl(const VersionedMapEntryImpl&) = delete; + VersionedMapEntryImpl& operator=(const VersionedMapEntryImpl&) = delete; - virtual VersionStamp& getVersionStamp() { return *this; } + VersionStamp& getVersionStamp() override { return *this; } protected: inline explicit VersionedMapEntryImpl(bool) : MapEntryImpl(true) {} @@ -283,19 +287,14 @@ class APACHE_GEODE_EXPORT VersionedMapEntryImpl : public MapEntryImpl, inline explicit VersionedMapEntryImpl( const std::shared_ptr& key) : MapEntryImpl(key) {} - - private: 
- // disabled - VersionedMapEntryImpl(const VersionedMapEntryImpl&); - VersionedMapEntryImpl& operator=(const VersionedMapEntryImpl&); }; -class APACHE_GEODE_EXPORT EntryFactory { +class EntryFactory { public: explicit EntryFactory(const bool concurrencyChecksEnabled) : m_concurrencyChecksEnabled(concurrencyChecksEnabled) {} - virtual ~EntryFactory() {} + virtual ~EntryFactory() noexcept = default; virtual void newMapEntry(ExpiryTaskManager* expiryTaskManager, const std::shared_ptr& key, @@ -304,6 +303,7 @@ class APACHE_GEODE_EXPORT EntryFactory { protected: bool m_concurrencyChecksEnabled; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/PdxRemotePreservedData.hpp b/cppcache/src/PdxRemotePreservedData.hpp index 5b39819e30..711b694c47 100644 --- a/cppcache/src/PdxRemotePreservedData.hpp +++ b/cppcache/src/PdxRemotePreservedData.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_PDXREMOTEPRESERVEDDATA_H_ -#define GEODE_PDXREMOTEPRESERVEDDATA_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. 
*/ +#pragma once + +#ifndef GEODE_PDXREMOTEPRESERVEDDATA_H_ +#define GEODE_PDXREMOTEPRESERVEDDATA_H_ + #include #include @@ -30,6 +30,7 @@ namespace apache { namespace geode { namespace client { + class PdxRemotePreservedData; class PdxRemotePreservedData : public PdxUnreadFields { @@ -38,25 +39,15 @@ class PdxRemotePreservedData : public PdxUnreadFields { int32_t m_typeId; int32_t m_mergedTypeId; int32_t m_currentIndex; - std::shared_ptr /*Object^*/ m_owner; + std::shared_ptr m_owner; ExpiryTaskManager::id_type m_expiryTakId; public: PdxRemotePreservedData() - : /* adongre - Coverity II - * CID 29283: Uninitialized scalar field (UNINIT_CTOR) - */ - m_typeId(0), - m_mergedTypeId(0), - m_currentIndex(0), - m_expiryTakId(0) {} - - virtual ~PdxRemotePreservedData() { - /*for(int i=0;i owner) { m_typeId = typeId; diff --git a/cppcache/src/RemoteQuery.cpp b/cppcache/src/RemoteQuery.cpp index a29f80ca8b..a1e3e203f4 100644 --- a/cppcache/src/RemoteQuery.cpp +++ b/cppcache/src/RemoteQuery.cpp @@ -70,6 +70,7 @@ std::shared_ptr RemoteQuery::execute( pool->getStats().incQueryExecutionId(); } /*get the start time for QueryExecutionTime stat*/ + // NOLINTNEXTLINE(clang-analyzer-core.CallAndMessage) bool enableTimeStatistics = tcdm->getConnectionManager() .getCacheImpl() ->getDistributedSystem() @@ -140,12 +141,11 @@ GfErrType RemoteQuery::executeNoThrow( msg.setTimeout(timeout); reply.setTimeout(timeout); - GfErrType err = GF_NOERR; LOGFINEST("%s: sending request for query: %s", func, m_queryString.c_str()); if (tcdm == nullptr) { tcdm = m_tccdm; } - err = tcdm->sendSyncRequest(msg, reply); + auto err = tcdm->sendSyncRequest(msg, reply); if (err != GF_NOERR) { return err; } @@ -164,12 +164,11 @@ GfErrType RemoteQuery::executeNoThrow( msg.setTimeout(timeout); reply.setTimeout(timeout); - GfErrType err = GF_NOERR; LOGFINEST("%s: sending request for query: %s", func, m_queryString.c_str()); if (tcdm == nullptr) { tcdm = m_tccdm; } - err = tcdm->sendSyncRequest(msg, reply); + auto 
err = tcdm->sendSyncRequest(msg, reply); if (err != GF_NOERR) { return err; } diff --git a/cppcache/src/SerializationRegistry.cpp b/cppcache/src/SerializationRegistry.cpp index 151a643e4e..a6d81c784d 100644 --- a/cppcache/src/SerializationRegistry.cpp +++ b/cppcache/src/SerializationRegistry.cpp @@ -680,7 +680,6 @@ void DataSerializableHandler::serialize( ptr->toDelta(dataOutput); } else { dataSerializable->toData(dataOutput); - ; } } diff --git a/cppcache/src/ServerLocation.hpp b/cppcache/src/ServerLocation.hpp index 91cd670ac4..46097abeca 100644 --- a/cppcache/src/ServerLocation.hpp +++ b/cppcache/src/ServerLocation.hpp @@ -42,7 +42,7 @@ class APACHE_GEODE_EXPORT ServerLocation : Serializable(), m_serverName(std::move(serverName)), m_port(port) { LOGDEBUG( "ServerLocation::ServerLocation(): creating ServerLocation for %s:%d", - serverName.c_str(), port); + m_serverName.c_str(), port); makeEpString(); } @@ -64,11 +64,6 @@ class APACHE_GEODE_EXPORT ServerLocation const std::string& getServerName() const { return m_serverName; } - void setServername(std::string serverName) { - m_serverName = std::move(serverName); - makeEpString(); - } - int getPort() const { return m_port; } void toData(DataOutput& output) const override { diff --git a/cppcache/src/SystemProperties.cpp b/cppcache/src/SystemProperties.cpp index a3184c9832..ec8e4f0ab7 100644 --- a/cppcache/src/SystemProperties.cpp +++ b/cppcache/src/SystemProperties.cpp @@ -23,15 +23,12 @@ #include #include #include -#include #include "CppCacheLibrary.hpp" #include "util/Log.hpp" #if defined(_WIN32) #include -#else -#include #endif namespace { @@ -134,12 +131,6 @@ namespace apache { namespace geode { namespace client { -namespace impl { - -void* getFactoryFunc(const char* lib, const char* funcName); - -} // namespace impl - SystemProperties::SystemProperties( const std::shared_ptr& propertiesPtr, const std::string& configFile) @@ -191,8 +182,10 @@ SystemProperties::SystemProperties( public: explicit 
ProcessPropsVisitor(SystemProperties* sysProps) : m_sysProps(sysProps) {} + ~ProcessPropsVisitor() noexcept override = default; + void visit(const std::shared_ptr& key, - const std::shared_ptr& value) { + const std::shared_ptr& value) override { auto property = key->toString(); std::string val; if (value != nullptr) { diff --git a/cppcache/src/TXId.cpp b/cppcache/src/TXId.cpp index 3b8cc9008c..869395107e 100644 --- a/cppcache/src/TXId.cpp +++ b/cppcache/src/TXId.cpp @@ -14,12 +14,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -/* - * TXId.cpp - * - * Created on: 07-Feb-2011 - * Author: ankurs - */ #include "TXId.hpp" @@ -36,7 +30,7 @@ TXId& TXId::operator=(const TXId& other) { return *this; } -TXId::~TXId() {} +TXId::~TXId() noexcept = default; int32_t TXId::getId() { return m_TXId; } } // namespace client diff --git a/cppcache/src/TXId.hpp b/cppcache/src/TXId.hpp index 2f2d280e78..1cb13e9381 100644 --- a/cppcache/src/TXId.hpp +++ b/cppcache/src/TXId.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_TXID_H_ -#define GEODE_TXID_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -19,12 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -/* - * TXId.h - * - * Created on: 07-Feb-2011 - * Author: ankurs - */ + +#pragma once + +#ifndef GEODE_TXID_H_ +#define GEODE_TXID_H_ #include @@ -41,7 +35,7 @@ class TXId : public apache::geode::client::TransactionId { TXId& operator=(const TXId&); - virtual ~TXId(); + ~TXId() noexcept override; int32_t getId(); diff --git a/cppcache/src/TcrChunkedContext.hpp b/cppcache/src/TcrChunkedContext.hpp index 17b42c6c5a..00e22d13f7 100644 --- a/cppcache/src/TcrChunkedContext.hpp +++ b/cppcache/src/TcrChunkedContext.hpp @@ -56,7 +56,7 @@ class TcrChunkedResult { m_ex(nullptr), m_inSameThread(false), m_dsmemId(0) {} - virtual ~TcrChunkedResult() {} + virtual ~TcrChunkedResult() noexcept {} void setFinalizeSemaphore(ACE_Semaphore* finalizeSema) { m_finalizeSema = finalizeSema; } diff --git a/cppcache/src/TcrConnection.cpp b/cppcache/src/TcrConnection.cpp index 40d123840b..f73707e70e 100644 --- a/cppcache/src/TcrConnection.cpp +++ b/cppcache/src/TcrConnection.cpp @@ -69,6 +69,7 @@ int expiryTimeVariancePercentage() { srand(static_cast((now_s * 1000) + (now_ms / 1000))); const int numbers = 21; + // NOLINTNEXTLINE(clang-analyzer-security.insecureAPI.rand): TODO replace int random = rand() % numbers + 1; if (random > 10) { @@ -989,12 +990,6 @@ std::shared_ptr TcrConnection::readHandshakeRawData( } } -std::shared_ptr TcrConnection::readHandshakeByteArray( - std::chrono::microseconds connectTimeout) { - uint32_t arraySize = readHandshakeArraySize(connectTimeout); - return readHandshakeRawData(arraySize, connectTimeout); -} - // read a byte array int32_t TcrConnection::readHandshakeArraySize( std::chrono::microseconds connectTimeout) { @@ -1073,37 +1068,6 @@ void TcrConnection::readHandShakeBytes( _GEODE_SAFE_DELETE_ARRAY(recvMessage); } -int32_t TcrConnection::readHandShakeInt( - std::chrono::microseconds connectTimeout) { - ConnErrType error = CONN_NOERR; - uint8_t* recvMessage; - _GEODE_NEW(recvMessage, uint8_t[4]); - - if ((error = 
receiveData(reinterpret_cast(recvMessage), 4, - connectTimeout)) != CONN_NOERR) { - if (error & CONN_TIMEOUT) { - _GEODE_SAFE_DELETE_ARRAY(recvMessage); - m_conn.reset(); - throwException( - TimeoutException("TcrConnection::TcrConnection: " - "Timeout in handshake")); - } else { - _GEODE_SAFE_DELETE_ARRAY(recvMessage); - m_conn.reset(); - throwException( - GeodeIOException("TcrConnection::TcrConnection: " - "Handshake failure")); - } - } - - auto di = m_connectionManager.getCacheImpl()->createDataInput(recvMessage, 4); - int32_t val = di.readInt32(); - - _GEODE_SAFE_DELETE_ARRAY(recvMessage); - - return val; -} - std::shared_ptr TcrConnection::readHandshakeString( std::chrono::microseconds connectTimeout) { ConnErrType error = CONN_NOERR; diff --git a/cppcache/src/TcrConnection.hpp b/cppcache/src/TcrConnection.hpp index 857ff0df4c..acb974e9b8 100644 --- a/cppcache/src/TcrConnection.hpp +++ b/cppcache/src/TcrConnection.hpp @@ -300,11 +300,6 @@ class TcrConnection { */ uint8_t getOverrides(const SystemProperties* props); - /** - * To read the from stream - */ - int32_t readHandShakeInt(std::chrono::microseconds connectTimeout); - /* * To read the arraysize */ @@ -341,13 +336,6 @@ class TcrConnection { std::shared_ptr readHandshakeString( std::chrono::microseconds connectTimeout); - /** - * Reads a byte array (using initial length) from socket and handles error - * conditions in case of Handshake. 
- */ - std::shared_ptr readHandshakeByteArray( - std::chrono::microseconds connectTimeout); - /** * Send data to the connection till sendTimeout */ diff --git a/cppcache/src/TcrConnectionManager.cpp b/cppcache/src/TcrConnectionManager.cpp index 7159a30171..7e7adeb691 100644 --- a/cppcache/src/TcrConnectionManager.cpp +++ b/cppcache/src/TcrConnectionManager.cpp @@ -26,15 +26,11 @@ #include "CacheImpl.hpp" #include "ExpiryHandler_T.hpp" #include "ExpiryTaskManager.hpp" -#include "RemoteQueryService.hpp" -#include "ServerLocation.hpp" #include "TcrConnection.hpp" #include "TcrEndpoint.hpp" #include "TcrHADistributionManager.hpp" #include "ThinClientBaseDM.hpp" -#include "ThinClientCacheDistributionManager.hpp" #include "ThinClientHARegion.hpp" -#include "ThinClientLocatorHelper.hpp" #include "ThinClientRedundancyManager.hpp" #include "ThinClientRegion.hpp" #include "Utils.hpp" @@ -46,7 +42,6 @@ namespace client { volatile bool TcrConnectionManager::TEST_DURABLE_CLIENT_CRASH = false; -const char *TcrConnectionManager::NC_Redundancy = "NC Redundancy"; const char *TcrConnectionManager::NC_Failover = "NC Failover"; const char *TcrConnectionManager::NC_CleanUp = "NC CleanUp"; @@ -69,9 +64,6 @@ TcrConnectionManager::TcrConnectionManager(CacheImpl *cache) new ThinClientRedundancyManager(this)); } -ExpiryTaskManager::id_type TcrConnectionManager::getPingTaskId() { - return m_pingTaskId; -} void TcrConnectionManager::init(bool isPool) { if (!m_initGuard) { m_initGuard = true; @@ -299,7 +291,6 @@ void TcrConnectionManager::failover(std::atomic &isRunning) { it->failover(); } while (m_failoverSema.tryacquire() != -1) { - ; } } catch (const Exception &e) { LOGERROR(e.what()); @@ -334,19 +325,13 @@ GfErrType TcrConnectionManager::registerInterestAllRegions( // TcrHADistributionManagers). 
GfErrType err = GF_NOERR; - GfErrType opErr = GF_NOERR; std::lock_guard guard(m_distMngrsLock); - std::list::iterator begin = m_distMngrs.begin(); - std::list::iterator end = m_distMngrs.end(); - for (std::list::iterator it = begin; it != end; ++it) { - TcrHADistributionManager *tcrHADM = - dynamic_cast(*it); - if (tcrHADM != nullptr) { - if ((opErr = tcrHADM->registerInterestForRegion(ep, request, reply)) != - GF_NOERR) { - if (err == GF_NOERR) { - err = opErr; - } + + for (const auto &it : m_distMngrs) { + if (auto tcrHADM = dynamic_cast(it)) { + auto opErr = tcrHADM->registerInterestForRegion(ep, request, reply); + if (err == GF_NOERR) { + err = opErr; } } } @@ -429,7 +414,6 @@ void TcrConnectionManager::redundancy(std::atomic &isRunning) { if (isRunning && !m_isNetDown) { m_redundancyManager->maintainRedundancyLevel(); while (m_redundancySema.tryacquire() != -1) { - ; } } } @@ -458,7 +442,6 @@ void TcrConnectionManager::cleanup(std::atomic &isRunning) { cleanNotificationLists(); while (m_cleanupSema.tryacquire() != -1) { - ; } } while (isRunning); diff --git a/cppcache/src/TcrConnectionManager.hpp b/cppcache/src/TcrConnectionManager.hpp index 682e090db1..393d472eb5 100644 --- a/cppcache/src/TcrConnectionManager.hpp +++ b/cppcache/src/TcrConnectionManager.hpp @@ -66,7 +66,6 @@ class TcrConnectionManager { int checkConnection(const ACE_Time_Value&, const void*); int checkRedundancy(const ACE_Time_Value&, const void*); int processEventIdMap(const ACE_Time_Value&, const void*); - ExpiryTaskManager::id_type getPingTaskId(); void close(); void readyForEvents(); @@ -185,7 +184,6 @@ class TcrConnectionManager { friend class DistManagersLockGuard; friend class ThinClientPoolDM; friend class ThinClientPoolHADM; - static const char* NC_Redundancy; static const char* NC_Failover; static const char* NC_CleanUp; }; diff --git a/cppcache/src/TcrDistributionManager.hpp b/cppcache/src/TcrDistributionManager.hpp index 2a7907d79e..56b525daee 100644 --- 
a/cppcache/src/TcrDistributionManager.hpp +++ b/cppcache/src/TcrDistributionManager.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_TCRDISTRIBUTIONMANAGER_H_ -#define GEODE_TCRDISTRIBUTIONMANAGER_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. */ +#pragma once + +#ifndef GEODE_TCRDISTRIBUTIONMANAGER_H_ +#define GEODE_TCRDISTRIBUTIONMANAGER_H_ + #include #include "ThinClientDistributionManager.hpp" @@ -29,6 +29,7 @@ namespace geode { namespace client { class ThinClientRegion; + /** * @brief Distribute data between caches */ @@ -36,23 +37,23 @@ class TcrDistributionManager : public ThinClientDistributionManager { public: TcrDistributionManager(ThinClientRegion* region, TcrConnectionManager& connManager); + TcrDistributionManager(const TcrDistributionManager&) = delete; + TcrDistributionManager& operator=(const TcrDistributionManager&) = delete; + ~TcrDistributionManager() noexcept override = default; protected: - virtual void getEndpointNames(std::unordered_set& endpointNames); - - virtual void postUnregisterAction(); + void getEndpointNames( + std::unordered_set& endpointNames) override; - virtual bool preFailoverAction(); + void postUnregisterAction() override; - virtual bool postFailoverAction(TcrEndpoint* endpoint); + bool preFailoverAction() override; - virtual void destroyAction(); + bool postFailoverAction(TcrEndpoint* endpoint) override; - private: - // Disallow copy constructor and assignment operator. 
- TcrDistributionManager(const TcrDistributionManager&); - TcrDistributionManager& operator=(const TcrDistributionManager&); + void destroyAction() override; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/TcrEndpoint.cpp b/cppcache/src/TcrEndpoint.cpp index 0ef91e2fb7..dce9b7da97 100644 --- a/cppcache/src/TcrEndpoint.cpp +++ b/cppcache/src/TcrEndpoint.cpp @@ -94,6 +94,8 @@ TcrEndpoint::~TcrEndpoint() { m_name.c_str()); // fail in dev build to track #295 better in regressions m_numRegionListener = 0; + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) closeNotification(); } } diff --git a/cppcache/src/ThinClientBaseDM.cpp b/cppcache/src/ThinClientBaseDM.cpp index 4acc0d234d..a47be6d98e 100644 --- a/cppcache/src/ThinClientBaseDM.cpp +++ b/cppcache/src/ThinClientBaseDM.cpp @@ -41,7 +41,7 @@ ThinClientBaseDM::ThinClientBaseDM(TcrConnectionManager& connManager, m_chunks(true), m_chunkProcessor(nullptr) {} -ThinClientBaseDM::~ThinClientBaseDM() = default; +ThinClientBaseDM::~ThinClientBaseDM() noexcept = default; void ThinClientBaseDM::init() { const auto& systemProperties = m_connManager.getCacheImpl() @@ -72,8 +72,7 @@ void ThinClientBaseDM::destroy(bool) { GfErrType ThinClientBaseDM::sendSyncRequestRegisterInterest( TcrMessage& request, TcrMessageReply& reply, bool attemptFailover, ThinClientRegion*, TcrEndpoint* endpoint) { - GfErrType err = GF_NOERR; - + GfErrType err; if (endpoint == nullptr) { err = sendSyncRequest(request, reply, attemptFailover); } else { @@ -150,10 +149,9 @@ GfErrType ThinClientBaseDM::handleEPError(TcrEndpoint* ep, GfErrType ThinClientBaseDM::sendRequestToEndPoint(const TcrMessage& request, TcrMessageReply& reply, TcrEndpoint* ep) { - GfErrType error = GF_NOERR; LOGDEBUG("ThinClientBaseDM::sendRequestToEP: invoking endpoint send for: %s", ep->name().c_str()); - error = ep->send(request, reply); + auto error = ep->send(request, reply); LOGDEBUG( 
"ThinClientBaseDM::sendRequestToEP: completed endpoint send for: %s " "[error:%d]", @@ -262,9 +260,8 @@ void ThinClientBaseDM::beforeSendingRequest(const TcrMessage& request, this->isSecurityOn(), this->isMultiUserMode(), request.getMessageType()); if (!(request.isMetaRegion()) && TcrMessage::isUserInitiativeOps(request) && (this->isSecurityOn() || this->isMultiUserMode())) { - int64_t connId = 0; + int64_t connId; int64_t uniqueId = 0; - if (!this->isMultiUserMode()) { connId = conn->getConnectionId(); uniqueId = conn->getEndpointObject()->getUniqueId(); diff --git a/cppcache/src/ThinClientBaseDM.hpp b/cppcache/src/ThinClientBaseDM.hpp index 61c3fe32ce..57ae3b67c4 100644 --- a/cppcache/src/ThinClientBaseDM.hpp +++ b/cppcache/src/ThinClientBaseDM.hpp @@ -49,7 +49,7 @@ class EventId; class ThinClientBaseDM { public: ThinClientBaseDM(TcrConnectionManager& connManager, ThinClientRegion* region); - virtual ~ThinClientBaseDM() = 0; + virtual ~ThinClientBaseDM() noexcept; virtual void init(); virtual void destroy(bool keepalive = false); diff --git a/cppcache/src/ThinClientCacheDistributionManager.hpp b/cppcache/src/ThinClientCacheDistributionManager.hpp index 29898e8465..d6edc9263c 100644 --- a/cppcache/src/ThinClientCacheDistributionManager.hpp +++ b/cppcache/src/ThinClientCacheDistributionManager.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_THINCLIENTCACHEDISTRIBUTIONMANAGER_H_ -#define GEODE_THINCLIENTCACHEDISTRIBUTIONMANAGER_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -20,6 +15,11 @@ * limitations under the License. 
*/ +#pragma once + +#ifndef GEODE_THINCLIENTCACHEDISTRIBUTIONMANAGER_H_ +#define GEODE_THINCLIENTCACHEDISTRIBUTIONMANAGER_H_ + #include #include @@ -29,37 +29,38 @@ namespace apache { namespace geode { namespace client { -/** - * @brief Distribute data between caches - */ + class TcrMessage; class TcrConnection; +/** + * @brief Distribute data between caches + */ class ThinClientCacheDistributionManager : public ThinClientDistributionManager { public: explicit ThinClientCacheDistributionManager( TcrConnectionManager& connManager); - ~ThinClientCacheDistributionManager() {} + ~ThinClientCacheDistributionManager() override {} - void init(); + void init() override; virtual GfErrType sendSyncRequest(TcrMessage& request, TcrMessageReply& reply, bool attemptFailover = true, - bool isBGThread = false); + bool isBGThread = false) override; GfErrType sendSyncRequestCq(TcrMessage& request, TcrMessageReply& reply); GfErrType sendRequestToPrimary(TcrMessage& request, TcrMessageReply& reply); protected: - virtual bool preFailoverAction(); - virtual bool postFailoverAction(TcrEndpoint* endpoint); + bool preFailoverAction() override; + bool postFailoverAction(TcrEndpoint* endpoint) override; - private: - // Disallow default/copy constructor and assignment operator. 
- ThinClientCacheDistributionManager(); - ThinClientCacheDistributionManager(const ThinClientCacheDistributionManager&); + public: + ThinClientCacheDistributionManager() = delete; + ThinClientCacheDistributionManager( + const ThinClientCacheDistributionManager&) = delete; ThinClientCacheDistributionManager& operator=( - const ThinClientCacheDistributionManager&); + const ThinClientCacheDistributionManager&) = delete; }; } // namespace client diff --git a/cppcache/src/ThinClientDistributionManager.hpp b/cppcache/src/ThinClientDistributionManager.hpp index b1657e1526..a00b64d9b1 100644 --- a/cppcache/src/ThinClientDistributionManager.hpp +++ b/cppcache/src/ThinClientDistributionManager.hpp @@ -32,7 +32,7 @@ class ThinClientDistributionManager : public ThinClientBaseDM { public: ThinClientDistributionManager(TcrConnectionManager& connManager, ThinClientRegion* region); - ~ThinClientDistributionManager() override = default; + ~ThinClientDistributionManager() noexcept override = default; void init() override; void destroy(bool keepalive = false) override; @@ -81,6 +81,7 @@ class ThinClientDistributionManager : public ThinClientBaseDM { std::vector m_endpoints; std::recursive_mutex m_endpointsLock; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/ThinClientPoolDM.cpp b/cppcache/src/ThinClientPoolDM.cpp index 52eca1e84a..aaccde25cb 100644 --- a/cppcache/src/ThinClientPoolDM.cpp +++ b/cppcache/src/ThinClientPoolDM.cpp @@ -20,7 +20,6 @@ #include #include -#include #include #include @@ -237,6 +236,8 @@ void ThinClientPoolDM::init() { } ThinClientPoolDM::~ThinClientPoolDM() { + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) destroy(); _GEODE_SAFE_DELETE(m_locHelper); _GEODE_SAFE_DELETE(m_stats); @@ -817,6 +818,8 @@ void ThinClientPoolDM::destroy(bool keepAlive) { m_PoolStatsSampler = nullptr; } LOGDEBUG("PoolStatsSampler thread closed ."); + // TODO suspect + // 
NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) stopCliCallbackThread(); LOGDEBUG("ThinClientPoolDM::destroy( ): Closing connection manager."); auto cacheImpl = m_connManager.getCacheImpl(); @@ -831,7 +834,11 @@ void ThinClientPoolDM::destroy(bool keepAlive) { } LOGDEBUG("Closing PoolStatsSampler thread."); + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) stopPingThread(); + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) stopUpdateLocatorListThread(); if (m_clientMetadataService) { @@ -847,6 +854,8 @@ void ThinClientPoolDM::destroy(bool keepAlive) { LOGDEBUG("ThinClientPoolDM::destroy( ): after close "); // Close Stats + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) getStats().close(); cacheImpl->getStatisticsManager().forceSample(); diff --git a/cppcache/src/ThinClientPoolDM.hpp b/cppcache/src/ThinClientPoolDM.hpp index 4e063992a4..ce1f8d83ee 100644 --- a/cppcache/src/ThinClientPoolDM.hpp +++ b/cppcache/src/ThinClientPoolDM.hpp @@ -342,7 +342,7 @@ class FunctionExecution : public PooledWork { m_userAttr = nullptr; } - ~FunctionExecution() {} + ~FunctionExecution() noexcept override = default; std::shared_ptr getException() { return exceptionPtr; } @@ -366,7 +366,7 @@ class FunctionExecution : public PooledWork { m_userAttr = userAttr; } - GfErrType execute(void); + GfErrType execute(void) override; }; class OnRegionFunctionExecution : public PooledWork { @@ -398,7 +398,7 @@ class OnRegionFunctionExecution : public PooledWork { const std::shared_ptr& serverLocation, bool allBuckets); - ~OnRegionFunctionExecution() { + ~OnRegionFunctionExecution() noexcept override { delete m_request; delete m_reply; delete m_resultCollector; @@ -414,7 +414,7 @@ class OnRegionFunctionExecution : public PooledWork { return static_cast(m_resultCollector); } - GfErrType execute(void) { + GfErrType execute(void) override { GuardUserAttributes gua; if (m_userAttr) { diff --git 
a/cppcache/src/ThinClientPoolHADM.cpp b/cppcache/src/ThinClientPoolHADM.cpp index 7f162a82a8..b940569aa5 100644 --- a/cppcache/src/ThinClientPoolHADM.cpp +++ b/cppcache/src/ThinClientPoolHADM.cpp @@ -26,6 +26,7 @@ namespace apache { namespace geode { namespace client { + const char* ThinClientPoolHADM::NC_Redundancy = "NC Redundancy"; ThinClientPoolHADM::ThinClientPoolHADM(const char* name, std::shared_ptr poolAttr, @@ -92,10 +93,7 @@ GfErrType ThinClientPoolHADM::sendSyncRequest(TcrMessage& request, TcrMessageReply& reply, bool attemptFailover, bool isBGThread) { - GfErrType err = GF_NOERR; - int32_t type = request.getMessageType(); - if ((type == TcrMessage::EXECUTECQ_MSG_TYPE || type == TcrMessage::STOPCQ_MSG_TYPE || type == TcrMessage::CLOSECQ_MSG_TYPE || @@ -106,12 +104,11 @@ GfErrType ThinClientPoolHADM::sendSyncRequest(TcrMessage& request, type == TcrMessage::GETDURABLECQS_MSG_TYPE)) { if (m_destroyPending) return GF_NOERR; reply.setDM(this); - err = sendSyncRequestCq(request, reply); + return sendSyncRequestCq(request, reply); } else { - err = ThinClientPoolDM::sendSyncRequest(request, reply, attemptFailover, - isBGThread); + return ThinClientPoolDM::sendSyncRequest(request, reply, attemptFailover, + isBGThread); } - return err; } bool ThinClientPoolHADM::registerInterestForHARegion( @@ -152,7 +149,6 @@ void ThinClientPoolHADM::redundancy(std::atomic& isRunning) { if (isRunning && !m_connManager.isNetDown()) { m_redundancyManager->maintainRedundancyLevel(); while (m_redundancySema.tryacquire() != -1) { - ; } } } @@ -202,17 +198,15 @@ void ThinClientPoolHADM::sendNotificationCloseMsgs() { GfErrType ThinClientPoolHADM::registerInterestAllRegions( TcrEndpoint* ep, const TcrMessage* request, TcrMessageReply* reply) { GfErrType err = GF_NOERR; - GfErrType opErr = GF_NOERR; std::lock_guard guard(m_regionsLock); - for (std::list::iterator itr = m_regions.begin(); - itr != m_regions.end(); itr++) { - if ((opErr = (*itr)->registerKeys(ep, request, reply)) != 
GF_NOERR) { - if (err == GF_NOERR) { - err = opErr; - } + for (const auto& region : m_regions) { + auto opErr = region->registerKeys(ep, request, reply); + if (err == GF_NOERR) { + err = opErr; } } + return err; } diff --git a/cppcache/src/ThinClientPoolStickyHADM.hpp b/cppcache/src/ThinClientPoolStickyHADM.hpp index 3c7566b4b4..2cbbd014dc 100644 --- a/cppcache/src/ThinClientPoolStickyHADM.hpp +++ b/cppcache/src/ThinClientPoolStickyHADM.hpp @@ -1,8 +1,3 @@ -#pragma once - -#ifndef GEODE_THINCLIENTPOOLSTICKYHADM_H_ -#define GEODE_THINCLIENTPOOLSTICKYHADM_H_ - /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -19,11 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + +#pragma once + +#ifndef GEODE_THINCLIENTPOOLSTICKYHADM_H_ +#define GEODE_THINCLIENTPOOLSTICKYHADM_H_ + #include "ThinClientPoolHADM.hpp" namespace apache { namespace geode { namespace client { + class ThinClientPoolStickyHADM : public ThinClientPoolHADM { public: ThinClientPoolStickyHADM(const char* name, @@ -32,8 +34,9 @@ class ThinClientPoolStickyHADM : public ThinClientPoolHADM { : ThinClientPoolHADM(name, poolAttrs, connManager) { m_sticky = true; } - virtual ~ThinClientPoolStickyHADM() {} + ~ThinClientPoolStickyHADM() override = default; }; + } // namespace client } // namespace geode } // namespace apache diff --git a/cppcache/src/ThinClientRedundancyManager.cpp b/cppcache/src/ThinClientRedundancyManager.cpp index 03ac35474c..432cc4b000 100644 --- a/cppcache/src/ThinClientRedundancyManager.cpp +++ b/cppcache/src/ThinClientRedundancyManager.cpp @@ -14,12 +14,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -/* - * ThinClientRedundancyManager.cpp - * - * Created on: Dec 1, 2008 - * Author: abhaware - */ #include "ThinClientRedundancyManager.hpp" @@ -35,7 +29,6 @@ #include "RemoteQueryService.hpp" #include "ServerLocation.hpp" #include "TcrHADistributionManager.hpp" -#include "ThinClientLocatorHelper.hpp" #include "ThinClientPoolHADM.hpp" #include "ThinClientRegion.hpp" #include "UserAttributes.hpp" @@ -1133,7 +1126,6 @@ void ThinClientRedundancyManager::periodicAck(std::atomic& isRunning) { if (isRunning) { doPeriodicAck(); while (m_periodicAckSema.tryacquire() != -1) { - ; } } } @@ -1169,7 +1161,7 @@ void ThinClientRedundancyManager::doPeriodicAck() { result = m_poolHADM->sendRequestToEP(request, reply, *endpoint); } else { result = (*endpoint)->send(request, reply); - }; + } if (result == GF_NOERR && reply.getMessageType() == TcrMessage::REPLY) { LOGFINE( diff --git a/cppcache/src/ThinClientRegion.cpp b/cppcache/src/ThinClientRegion.cpp index d49b1ac053..4fa88ba83d 100644 --- a/cppcache/src/ThinClientRegion.cpp +++ b/cppcache/src/ThinClientRegion.cpp @@ -18,7 +18,6 @@ #include "ThinClientRegion.hpp" #include -#include #include #include @@ -59,19 +58,15 @@ class PutAllWork : public PooledWork { std::shared_ptr m_serverLocation; TcrMessage* m_request; TcrMessageReply* m_reply; - MapOfUpdateCounters m_mapOfUpdateCounters; bool m_attemptFailover; bool m_isBGThread; std::shared_ptr m_userAttribute; const std::shared_ptr m_region; - std::shared_ptr>> m_keys; std::shared_ptr m_map; std::shared_ptr m_verObjPartListPtr; std::chrono::milliseconds m_timeout; std::shared_ptr m_papException; - bool m_isPapeReceived; ChunkedPutAllResponse* m_resultCollector; - // UNUSED const std::shared_ptr& m_aCallbackArgument; public: PutAllWork(const PutAllWork&) = delete; @@ -90,13 +85,9 @@ class PutAllWork : public PooledWork { m_isBGThread(isBGThread), m_userAttribute(nullptr), m_region(region), - m_keys(keys), m_map(map), m_timeout(timeout), - m_papException(nullptr), - 
m_isPapeReceived(false) - // UNUSED , m_aCallbackArgument(aCallbackArgument) - { + m_papException(nullptr) { m_request = new TcrMessagePutAll( new DataOutput(m_region->getCache().createDataOutput()), m_region.get(), *m_map, m_timeout, m_poolDM, aCallbackArgument); @@ -118,20 +109,14 @@ class PutAllWork : public PooledWork { m_reply->setChunkedResultHandler(m_resultCollector); } - ~PutAllWork() { + ~PutAllWork() noexcept override { delete m_request; delete m_reply; delete m_resultCollector; } - TcrMessage* getReply() { return m_reply; } - std::shared_ptr getPutAllMap() { return m_map; } - std::shared_ptr getVerObjPartList() { - return m_verObjPartListPtr; - } - ChunkedPutAllResponse* getResultCollector() { return m_resultCollector; } std::shared_ptr getServerLocation() { @@ -143,7 +128,7 @@ class PutAllWork : public PooledWork { } void init() {} - GfErrType execute(void) { + GfErrType execute(void) override { GuardUserAttributes gua; if (m_userAttribute != nullptr) { @@ -175,7 +160,6 @@ class PutAllWork : public PooledWork { // TODO::Check for the PAPException and READ // PutAllPartialResultServerException and set its member for later use. 
// set m_papException and m_isPapeReceived - m_isPapeReceived = true; if (m_poolDM->isNotAuthorizedException(m_reply->getException())) { LOGDEBUG("received NotAuthorizedException"); err = GF_AUTHENTICATION_FAILED_EXCEPTION; @@ -208,16 +192,13 @@ class RemoveAllWork : public PooledWork { std::shared_ptr m_serverLocation; TcrMessage* m_request; TcrMessageReply* m_reply; - MapOfUpdateCounters m_mapOfUpdateCounters; bool m_attemptFailover; bool m_isBGThread; std::shared_ptr m_userAttribute; const std::shared_ptr m_region; const std::shared_ptr& m_aCallbackArgument; - std::shared_ptr>> m_keys; std::shared_ptr m_verObjPartListPtr; std::shared_ptr m_papException; - bool m_isPapeReceived; ChunkedRemoveAllResponse* m_resultCollector; public: @@ -237,9 +218,7 @@ class RemoveAllWork : public PooledWork { m_userAttribute(nullptr), m_region(region), m_aCallbackArgument(aCallbackArgument), - m_keys(keys), - m_papException(nullptr), - m_isPapeReceived(false) { + m_papException(nullptr) { m_request = new TcrMessageRemoveAll( new DataOutput(m_region->getCache().createDataOutput()), m_region.get(), *keys, m_aCallbackArgument, m_poolDM); @@ -253,23 +232,17 @@ class RemoveAllWork : public PooledWork { m_userAttribute = UserAttributes::threadLocalUserAttributes; } - m_resultCollector = new ChunkedRemoveAllResponse( - m_region, *m_reply, responseLock, m_verObjPartListPtr); + m_resultCollector = + new ChunkedRemoveAllResponse(m_region, *m_reply, m_verObjPartListPtr); m_reply->setChunkedResultHandler(m_resultCollector); } - ~RemoveAllWork() { + ~RemoveAllWork() noexcept override { delete m_request; delete m_reply; delete m_resultCollector; } - TcrMessage* getReply() { return m_reply; } - - std::shared_ptr getVerObjPartList() { - return m_verObjPartListPtr; - } - ChunkedRemoveAllResponse* getResultCollector() { return m_resultCollector; } std::shared_ptr getServerLocation() { @@ -281,7 +254,7 @@ class RemoveAllWork : public PooledWork { } void init() {} - GfErrType execute(void) { + 
GfErrType execute(void) override { GuardUserAttributes gua; if (m_userAttribute != nullptr) { @@ -313,7 +286,6 @@ class RemoveAllWork : public PooledWork { // TODO::Check for the PAPException and READ // PutAllPartialResultServerException and set its member for later use. // set m_papException and m_isPapeReceived - m_isPapeReceived = true; if (m_poolDM->isNotAuthorizedException(m_reply->getException())) { LOGDEBUG("received NotAuthorizedException"); err = GF_AUTHENTICATION_FAILED_EXCEPTION; @@ -1913,7 +1885,7 @@ GfErrType ThinClientRegion::multiHopRemoveAllNoThrow_remote( std::make_shared(this, responseLock); // need to check ChunkedRemoveAllResponse* resultCollector(new ChunkedRemoveAllResponse( - shared_from_this(), reply, responseLock, versionedObjPartList)); + shared_from_this(), reply, versionedObjPartList)); reply.setChunkedResultHandler(resultCollector); err = m_tcrdm->sendSyncRequest(request, reply); @@ -2878,22 +2850,6 @@ InterestResultPolicy ThinClientRegion::copyInterestList( return interestPolicy; } -void ThinClientRegion::registerInterestGetValues( - const char* method, const std::vector>* keys, - const std::shared_ptr>>& - resultKeys) { - auto exceptions = std::make_shared(); - auto err = getAllNoThrow_remote(keys, nullptr, exceptions, resultKeys, true, - nullptr); - throwExceptionIfError(method, err); - // log any exceptions here - for (const auto& iter : *exceptions) { - LOGWARN("%s Exception for key %s:: %s: %s", method, - Utils::nullSafeToString(iter.first).c_str(), - iter.second->getName().c_str(), iter.second->what()); - } -} - void ThinClientRegion::destroyDM(bool keepEndpoints) { if (m_tcrdm != nullptr) { m_tcrdm->destroy(keepEndpoints); @@ -2910,6 +2866,8 @@ void ThinClientRegion::release(bool invokeCallbacks) { lock.lock(); } + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) destroyDM(invokeCallbacks); m_interestList.clear(); @@ -2928,6 +2886,8 @@ void ThinClientRegion::release(bool invokeCallbacks) { 
ThinClientRegion::~ThinClientRegion() noexcept { TryWriteGuard guard(m_rwLock, m_destroyPending); if (!m_destroyPending) { + // TODO suspect + // NOLINTNEXTLINE(clang-analyzer-optin.cplusplus.VirtualCall) release(false); } } diff --git a/cppcache/src/ThinClientRegion.hpp b/cppcache/src/ThinClientRegion.hpp index f62bee935c..03d02665a2 100644 --- a/cppcache/src/ThinClientRegion.hpp +++ b/cppcache/src/ThinClientRegion.hpp @@ -321,12 +321,6 @@ class APACHE_GEODE_EXPORT ThinClientRegion : public LocalRegion { const std::shared_ptr& keyPtr, const std::shared_ptr& cvalue, const std::shared_ptr& aCallbackArgument); - // method to get the values for a register interest - void registerInterestGetValues( - const char* method, - const std::vector>* keys, - const std::shared_ptr>>& - resultKeys); GfErrType getNoThrow_FullObject( std::shared_ptr eventId, std::shared_ptr& fullObject, std::shared_ptr& versionTag) override; @@ -380,10 +374,6 @@ class ChunkedInterestResponse : public TcrChunkedResult { TcrMessage& m_replyMsg; std::shared_ptr>> m_resultKeys; - // disabled - ChunkedInterestResponse(const ChunkedInterestResponse&); - ChunkedInterestResponse& operator=(const ChunkedInterestResponse&); - public: inline ChunkedInterestResponse( TcrMessage& msg, @@ -395,15 +385,20 @@ class ChunkedInterestResponse : public TcrChunkedResult { m_replyMsg(replyMsg), m_resultKeys(resultKeys) {} + ChunkedInterestResponse(const ChunkedInterestResponse&) = delete; + ChunkedInterestResponse& operator=(const ChunkedInterestResponse&) = delete; + + ~ChunkedInterestResponse() noexcept override = default; + inline const std::shared_ptr>>& getResultKeys() const { return m_resultKeys; } - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + virtual void reset() 
override; }; /** @@ -419,16 +414,17 @@ class ChunkedQueryResponse : public TcrChunkedResult { void skipClass(DataInput& input); - // disabled - ChunkedQueryResponse(const ChunkedQueryResponse&); - ChunkedQueryResponse& operator=(const ChunkedQueryResponse&); - public: inline explicit ChunkedQueryResponse(TcrMessage& msg) : TcrChunkedResult(), m_msg(msg), m_queryResults(CacheableVector::create()) {} + ChunkedQueryResponse(const ChunkedQueryResponse&) = delete; + ChunkedQueryResponse& operator=(const ChunkedQueryResponse&) = delete; + + ~ChunkedQueryResponse() noexcept override = default; + inline const std::shared_ptr& getQueryResults() const { return m_queryResults; } @@ -437,10 +433,10 @@ class ChunkedQueryResponse : public TcrChunkedResult { return m_structFieldNames; } - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; void readObjectPartList(DataInput& input, bool isResultSet); }; @@ -458,11 +454,6 @@ class ChunkedFunctionExecutionResponse : public TcrChunkedResult { std::shared_ptr m_rc; std::shared_ptr m_resultCollectorLock; - // disabled - ChunkedFunctionExecutionResponse(const ChunkedFunctionExecutionResponse&); - ChunkedFunctionExecutionResponse& operator=( - const ChunkedFunctionExecutionResponse&); - public: inline ChunkedFunctionExecutionResponse(TcrMessage& msg, bool getResult, std::shared_ptr rc) @@ -477,22 +468,19 @@ class ChunkedFunctionExecutionResponse : public TcrChunkedResult { m_rc(rc), m_resultCollectorLock(resultCollectorLock) {} - /* inline const std::shared_ptr& - getFunctionExecutionResults() const - { - return m_functionExecutionResults; - }*/ + ChunkedFunctionExecutionResponse(const ChunkedFunctionExecutionResponse&) = + delete; + ChunkedFunctionExecutionResponse& operator=( + 
const ChunkedFunctionExecutionResponse&) = delete; + + ~ChunkedFunctionExecutionResponse() noexcept override = default; - /* adongre - * CID 28805: Parse warning (PW.USELESS_TYPE_QUALIFIER_ON_RETURN_TYPE) - */ - // inline const bool getResult() const inline bool getResult() const { return m_getResult; } - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; }; /** @@ -513,9 +501,6 @@ class ChunkedGetAllResponse : public TcrChunkedResult { bool m_addToLocalCache; uint32_t m_keysOffset; std::recursive_mutex& m_responseLock; - // disabled - ChunkedGetAllResponse(const ChunkedGetAllResponse&); - ChunkedGetAllResponse& operator=(const ChunkedGetAllResponse&); public: inline ChunkedGetAllResponse( @@ -540,10 +525,15 @@ class ChunkedGetAllResponse : public TcrChunkedResult { m_keysOffset(0), m_responseLock(responseLock) {} - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + ChunkedGetAllResponse(const ChunkedGetAllResponse&) = delete; + ChunkedGetAllResponse& operator=(const ChunkedGetAllResponse&) = delete; + + ~ChunkedGetAllResponse() noexcept override = default; + + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; void add(const ChunkedGetAllResponse* other); bool getAddToLocalCache() { return m_addToLocalCache; } @@ -565,9 +555,6 @@ class ChunkedPutAllResponse : public TcrChunkedResult { const std::shared_ptr m_region; std::recursive_mutex& m_responseLock; std::shared_ptr m_list; - // disabled - ChunkedPutAllResponse(const ChunkedPutAllResponse&); - ChunkedPutAllResponse& operator=(const 
ChunkedPutAllResponse&); public: inline ChunkedPutAllResponse( @@ -580,10 +567,15 @@ class ChunkedPutAllResponse : public TcrChunkedResult { m_responseLock(responseLock), m_list(list) {} - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + ChunkedPutAllResponse(const ChunkedPutAllResponse&) = delete; + ChunkedPutAllResponse& operator=(const ChunkedPutAllResponse&) = delete; + + ~ChunkedPutAllResponse() noexcept override = default; + + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; std::shared_ptr getList() { return m_list; } std::recursive_mutex& getResponseLock() { return m_responseLock; } }; @@ -595,29 +587,24 @@ class ChunkedRemoveAllResponse : public TcrChunkedResult { private: TcrMessage& m_msg; const std::shared_ptr m_region; - std::recursive_mutex& m_responseLock; std::shared_ptr m_list; - // disabled - ChunkedRemoveAllResponse(const ChunkedRemoveAllResponse&); - ChunkedRemoveAllResponse& operator=(const ChunkedRemoveAllResponse&); public: inline ChunkedRemoveAllResponse( const std::shared_ptr& region, TcrMessage& msg, - std::recursive_mutex& responseLock, std::shared_ptr& list) - : TcrChunkedResult(), - m_msg(msg), - m_region(region), - m_responseLock(responseLock), - m_list(list) {} + : TcrChunkedResult(), m_msg(msg), m_region(region), m_list(list) {} + + ChunkedRemoveAllResponse(const ChunkedRemoveAllResponse&) = delete; + ChunkedRemoveAllResponse& operator=(const ChunkedRemoveAllResponse&) = delete; - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + ~ChunkedRemoveAllResponse() noexcept override = default; + + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) 
override; + void reset() override; std::shared_ptr getList() { return m_list; } - std::recursive_mutex& getResponseLock() { return m_responseLock; } }; /** @@ -631,10 +618,6 @@ class ChunkedKeySetResponse : public TcrChunkedResult { TcrMessage& m_replyMsg; std::vector>& m_resultKeys; - // disabled - ChunkedKeySetResponse(const ChunkedKeySetResponse&); - ChunkedKeySetResponse& operator=(const ChunkedKeySetResponse&); - public: inline ChunkedKeySetResponse( TcrMessage& msg, std::vector>& resultKeys, @@ -644,10 +627,15 @@ class ChunkedKeySetResponse : public TcrChunkedResult { m_replyMsg(replyMsg), m_resultKeys(resultKeys) {} - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + ChunkedKeySetResponse(const ChunkedKeySetResponse&) = delete; + ChunkedKeySetResponse& operator=(const ChunkedKeySetResponse&) = delete; + + ~ChunkedKeySetResponse() noexcept override = default; + + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; }; class ChunkedDurableCQListResponse : public TcrChunkedResult { @@ -655,10 +643,6 @@ class ChunkedDurableCQListResponse : public TcrChunkedResult { TcrMessage& m_msg; std::shared_ptr m_resultList; - // disabled - ChunkedDurableCQListResponse(const ChunkedDurableCQListResponse&); - ChunkedDurableCQListResponse& operator=(const ChunkedDurableCQListResponse&); - public: inline explicit ChunkedDurableCQListResponse(TcrMessage& msg) : TcrChunkedResult(), @@ -668,10 +652,16 @@ class ChunkedDurableCQListResponse : public TcrChunkedResult { return m_resultList; } - virtual void handleChunk(const uint8_t* chunk, int32_t chunkLen, - uint8_t isLastChunkWithSecurity, - const CacheImpl* cacheImpl); - virtual void reset(); + ChunkedDurableCQListResponse(const ChunkedDurableCQListResponse&) = delete; + ChunkedDurableCQListResponse& operator=(const 
ChunkedDurableCQListResponse&) = + delete; + + ~ChunkedDurableCQListResponse() noexcept override = default; + + void handleChunk(const uint8_t* chunk, int32_t chunkLen, + uint8_t isLastChunkWithSecurity, + const CacheImpl* cacheImpl) override; + void reset() override; }; } // namespace client diff --git a/cppcache/src/ThreadPool.hpp b/cppcache/src/ThreadPool.hpp index d72f12a786..a534bb0ed4 100644 --- a/cppcache/src/ThreadPool.hpp +++ b/cppcache/src/ThreadPool.hpp @@ -50,7 +50,7 @@ class PooledWork : public Callable { public: PooledWork() : m_mutex(), m_cond(), m_done(false) {} - ~PooledWork() override {} + ~PooledWork() noexcept override = default; void call() override { T res = execute(); diff --git a/cppcache/src/TrackedMapEntry.hpp b/cppcache/src/TrackedMapEntry.hpp index 7767b7e293..42fb357e89 100644 --- a/cppcache/src/TrackedMapEntry.hpp +++ b/cppcache/src/TrackedMapEntry.hpp @@ -36,7 +36,7 @@ class TrackedMapEntry final : public MapEntry { m_trackingNumber(trackingNumber), m_updateCount(updateCount) {} - virtual ~TrackedMapEntry() {} + ~TrackedMapEntry() noexcept override = default; std::shared_ptr getImplPtr() final { return m_entry; } diff --git a/cppcache/src/VersionStamp.hpp b/cppcache/src/VersionStamp.hpp index 211641379b..47ee64709c 100644 --- a/cppcache/src/VersionStamp.hpp +++ b/cppcache/src/VersionStamp.hpp @@ -31,10 +31,11 @@ namespace apache { namespace geode { namespace client { + /** * @brief This class encapsulates Version Stamp for map entries. 
*/ -class APACHE_GEODE_EXPORT VersionStamp { +class VersionStamp { public: VersionStamp() : m_memberID(0), @@ -50,7 +51,7 @@ class APACHE_GEODE_EXPORT VersionStamp { m_regionVersionHighBytes(rhs.m_regionVersionHighBytes), m_regionVersionLowBytes(rhs.m_regionVersionLowBytes) {} - virtual ~VersionStamp() {} + virtual ~VersionStamp() noexcept = default; void setVersions(std::shared_ptr versionTag); void setVersions(VersionStamp& versionStamp); int32_t getEntryVersion() const; diff --git a/cppcache/src/VersionedCacheableObjectPartList.cpp b/cppcache/src/VersionedCacheableObjectPartList.cpp index 15b8ea3f39..7190424cfc 100644 --- a/cppcache/src/VersionedCacheableObjectPartList.cpp +++ b/cppcache/src/VersionedCacheableObjectPartList.cpp @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + #include "VersionedCacheableObjectPartList.hpp" #include @@ -23,6 +24,7 @@ #include "DiskStoreId.hpp" #include "DiskVersionTag.hpp" #include "ThinClientRegion.hpp" + namespace apache { namespace geode { namespace client { @@ -181,7 +183,6 @@ void VersionedCacheableObjectPartList::fromData(DataInput& input) { if (m_hasTags) { len = static_cast(input.readUnsignedVL()); - ; m_versionTags.resize(len); std::vector ids; MemberListForVersionStamp& memberListForVersionStamp = diff --git a/cppcache/src/statistics/OsStatisticsImpl.cpp b/cppcache/src/statistics/OsStatisticsImpl.cpp index 63f3ca3444..a55b780998 100644 --- a/cppcache/src/statistics/OsStatisticsImpl.cpp +++ b/cppcache/src/statistics/OsStatisticsImpl.cpp @@ -85,10 +85,6 @@ OsStatisticsImpl::OsStatisticsImpl(StatisticsType* typeArg, this->numericId = calcNumericId(system, numericIdArg); this->uniqueId = uniqueIdArg; this->closed = false; - ; - /* adongre - * CID 28981: Uninitialized pointer field (UNINIT_CTOR) - */ doubleStorage = nullptr; intStorage = nullptr; longStorage = nullptr; @@ -125,22 +121,18 @@ OsStatisticsImpl::OsStatisticsImpl(StatisticsType* typeArg, 
} OsStatisticsImpl::~OsStatisticsImpl() noexcept { - try { - statsType = nullptr; - if (intStorage != nullptr) { - delete[] intStorage; - intStorage = nullptr; - } - if (longStorage != nullptr) { - delete[] longStorage; - longStorage = nullptr; - } - if (doubleStorage != nullptr) { - delete[] doubleStorage; - doubleStorage = nullptr; - } - } catch (...) { - LOGERROR("Exception in ~OsStatisticsImpl"); + statsType = nullptr; + if (intStorage != nullptr) { + delete[] intStorage; + intStorage = nullptr; + } + if (longStorage != nullptr) { + delete[] longStorage; + longStorage = nullptr; + } + if (doubleStorage != nullptr) { + delete[] doubleStorage; + doubleStorage = nullptr; } } diff --git a/cppcache/test/CacheableDateTest.cpp b/cppcache/test/CacheableDateTest.cpp index a22774e141..e133551a27 100644 --- a/cppcache/test/CacheableDateTest.cpp +++ b/cppcache/test/CacheableDateTest.cpp @@ -26,14 +26,14 @@ using apache::geode::client::CacheableDate; -TEST(CacheableDateTest, constructFrom_time_t_WithDefault) { +TEST(CacheableDateTest, constructFromTimeTWithDefault) { const CacheableDate cacheableDate; EXPECT_EQ(0, cacheableDate.milliseconds()); EXPECT_EQ(0, static_cast(cacheableDate)); } -TEST(CacheableDateTest, constructFrom_time_t) { +TEST(CacheableDateTest, constructFromTimeT) { time_t time = 0; std::time(&time); CacheableDate cacheableDate(time); @@ -42,7 +42,7 @@ TEST(CacheableDateTest, constructFrom_time_t) { EXPECT_EQ(time, static_cast(cacheableDate)); } -TEST(CacheableDateTest, constructFrom_time_point) { +TEST(CacheableDateTest, constructFromTimePoint) { const auto timePoint = CacheableDate::clock::now(); const CacheableDate cacheableDate(timePoint); @@ -61,7 +61,7 @@ TEST(CacheableDateTest, constructFrom_time_point) { EXPECT_EQ(time, static_cast(cacheableDate)); } -TEST(CacheableDateTest, constructFrom_duration) { +TEST(CacheableDateTest, constructFromDuration) { const auto duration = CacheableDate::duration(1000); const CacheableDate cacheableDate(duration); diff 
--git a/cppcache/test/CacheableKeyCreateTests.cpp b/cppcache/test/CacheableKeyCreateTests.cpp index 901671fdf1..6216e97c86 100644 --- a/cppcache/test/CacheableKeyCreateTests.cpp +++ b/cppcache/test/CacheableKeyCreateTests.cpp @@ -36,7 +36,7 @@ using apache::geode::client::CacheableInt64; using apache::geode::client::CacheableKey; using apache::geode::client::CacheableString; -TEST(CacheableKeyCreateTests, forArrayOf_constchar) { +TEST(CacheableKeyCreateTests, forArrayOfConstChar) { const auto cacheableKey = CacheableKey::create("test"); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -45,7 +45,7 @@ TEST(CacheableKeyCreateTests, forArrayOf_constchar) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, forArrayOf_char) { +TEST(CacheableKeyCreateTests, forArrayOfChar) { char* test = new char[5]{'t', 'e', 's', 't', '\0'}; const auto cacheableKey = CacheableKey::create(test); ASSERT_TRUE(nullptr != cacheableKey); @@ -55,7 +55,7 @@ TEST(CacheableKeyCreateTests, forArrayOf_char) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, forArrayOf_char16_t) { +TEST(CacheableKeyCreateTests, forArrayOfChar16) { const auto cacheableKey = CacheableKey::create(u"test"); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -64,7 +64,7 @@ TEST(CacheableKeyCreateTests, forArrayOf_char16_t) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, forArrayOf_char32_t) { +TEST(CacheableKeyCreateTests, forArrayOfChar32) { const auto cacheableKey = CacheableKey::create(U"test"); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -73,7 +73,7 @@ TEST(CacheableKeyCreateTests, forArrayOf_char32_t) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, forArrayOf_wchar_t) { +TEST(CacheableKeyCreateTests, forArrayOfWchar) { const auto cacheableKey = CacheableKey::create(L"test"); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -82,7 
+82,7 @@ TEST(CacheableKeyCreateTests, forArrayOf_wchar_t) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, for_string) { +TEST(CacheableKeyCreateTests, forString) { const auto cacheableKey = CacheableKey::create(std::string("test")); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -91,7 +91,7 @@ TEST(CacheableKeyCreateTests, for_string) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, for_u16string) { +TEST(CacheableKeyCreateTests, forU16string) { const auto cacheableKey = CacheableKey::create(std::u16string(u"test")); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -100,7 +100,7 @@ TEST(CacheableKeyCreateTests, for_u16string) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, for_u32string) { +TEST(CacheableKeyCreateTests, forU32string) { const auto cacheableKey = CacheableKey::create(std::u32string(U"test")); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -109,7 +109,7 @@ TEST(CacheableKeyCreateTests, for_u32string) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, for_wstring) { +TEST(CacheableKeyCreateTests, forWstring) { const auto cacheableKey = CacheableKey::create(std::wstring(L"test")); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableString = @@ -118,7 +118,7 @@ TEST(CacheableKeyCreateTests, for_wstring) { EXPECT_EQ(cacheableString->value(), "test"); } -TEST(CacheableKeyCreateTests, for_int8_t) { +TEST(CacheableKeyCreateTests, forInt8) { const auto cacheableKey = CacheableKey::create(static_cast(1)); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableByte = std::dynamic_pointer_cast(cacheableKey); @@ -126,7 +126,7 @@ TEST(CacheableKeyCreateTests, for_int8_t) { EXPECT_EQ(cacheableByte->value(), 1); } -TEST(CacheableKeyCreateTests, for_int16_t) { +TEST(CacheableKeyCreateTests, forInt16) { const auto cacheableKey = CacheableKey::create(static_cast(1)); ASSERT_TRUE(nullptr != 
cacheableKey); auto&& cacheableInt16 = @@ -135,7 +135,7 @@ TEST(CacheableKeyCreateTests, for_int16_t) { ASSERT_EQ(cacheableInt16->value(), 1); } -TEST(CacheableKeyCreateTests, for_int32_t) { +TEST(CacheableKeyCreateTests, forInt32) { const auto cacheableKey = CacheableKey::create(static_cast(1)); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableInt32 = @@ -144,7 +144,7 @@ TEST(CacheableKeyCreateTests, for_int32_t) { ASSERT_EQ(cacheableInt32->value(), 1); } -TEST(CacheableKeyCreateTests, for_int64_t) { +TEST(CacheableKeyCreateTests, forInt64) { const auto cacheableKey = CacheableKey::create(static_cast(1)); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableInt64 = @@ -153,7 +153,7 @@ TEST(CacheableKeyCreateTests, for_int64_t) { EXPECT_EQ(cacheableInt64->value(), 1); } -TEST(CacheableKeyCreateTests, for_char16_t) { +TEST(CacheableKeyCreateTests, forChar16) { const auto cacheableKey = CacheableKey::create(u'a'); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableCharacter = @@ -162,7 +162,7 @@ TEST(CacheableKeyCreateTests, for_char16_t) { EXPECT_EQ(cacheableCharacter->value(), u'a'); } -TEST(CacheableKeyCreateTests, for_float) { +TEST(CacheableKeyCreateTests, forFloat) { const auto cacheableKey = CacheableKey::create(1.1f); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableFloat = @@ -171,7 +171,7 @@ TEST(CacheableKeyCreateTests, for_float) { EXPECT_EQ(cacheableFloat->value(), 1.1f); } -TEST(CacheableKeyCreateTests, for_double) { +TEST(CacheableKeyCreateTests, forDouble) { const auto cacheableKey = CacheableKey::create(1.1); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableDouble = @@ -180,7 +180,7 @@ TEST(CacheableKeyCreateTests, for_double) { EXPECT_EQ(cacheableDouble->value(), 1.1); } -TEST(CacheableKeyCreateTests, for_bool) { +TEST(CacheableKeyCreateTests, forBool) { const auto cacheableKey = CacheableKey::create(true); ASSERT_TRUE(nullptr != cacheableKey); auto&& cacheableBoolean = @@ -189,7 +189,7 @@ TEST(CacheableKeyCreateTests, for_bool) { 
EXPECT_EQ(cacheableBoolean->value(), true); } -TEST(CacheableKeyCreateTests, for_timepoint) { +TEST(CacheableKeyCreateTests, forTimepoint) { auto time = std::chrono::system_clock::now(); const auto cacheableKey = CacheableKey::create(time); ASSERT_TRUE(nullptr != cacheableKey); diff --git a/cppcache/test/CacheableKeysTest.cpp b/cppcache/test/CacheableKeysTest.cpp index 58167d9608..1b5a1cac8f 100644 --- a/cppcache/test/CacheableKeysTest.cpp +++ b/cppcache/test/CacheableKeysTest.cpp @@ -26,25 +26,25 @@ TEST(CacheableKeysTest, boolDifferentHashCodes) { << "Two different bool values have different hashcodes"; } -TEST(CacheableKeysTest, int8_tDifferentHashCodes) { +TEST(CacheableKeysTest, int8DifferentHashCodes) { EXPECT_NE(hashcode(static_cast(37)), hashcode(static_cast(42))) << "Two different int8_t values have different hashcodes"; } -TEST(CacheableKeysTest, int16_tDifferentHashCodes) { +TEST(CacheableKeysTest, int16DifferentHashCodes) { EXPECT_NE(hashcode(static_cast(37)), hashcode(static_cast(42))) << "Two different int16_t values have different hashcodes"; } -TEST(CacheableKeysTest, int32_tDifferentHashCodes) { +TEST(CacheableKeysTest, int32DifferentHashCodes) { EXPECT_NE(hashcode(static_cast(37)), hashcode(static_cast(42))) << "Two different int32_t values have different hashcodes"; } -TEST(CacheableKeysTest, int64_tDifferentHashCodes) { +TEST(CacheableKeysTest, int64DifferentHashCodes) { EXPECT_NE(hashcode(static_cast(37)), hashcode(static_cast(42))) << "Two different int64_t values have different hashcodes"; diff --git a/cppcache/test/ChunkedHeaderTest.cpp b/cppcache/test/ChunkedHeaderTest.cpp index 767c8fcb3e..a1280e9233 100644 --- a/cppcache/test/ChunkedHeaderTest.cpp +++ b/cppcache/test/ChunkedHeaderTest.cpp @@ -34,7 +34,7 @@ namespace { class TcrMessageTestFixture : public TcrMessage { public: TcrMessageTestFixture() : TcrMessage() {} - virtual ~TcrMessageTestFixture() {} + ~TcrMessageTestFixture() noexcept override = default; }; } // namespace diff --git 
a/cppcache/test/DataInputTest.cpp b/cppcache/test/DataInputTest.cpp index 27d22e83d3..1ac81cca35 100644 --- a/cppcache/test/DataInputTest.cpp +++ b/cppcache/test/DataInputTest.cpp @@ -62,9 +62,7 @@ class TestDataInput { public: explicit TestDataInput(const char *str) : m_byteArray(ByteArray::fromString(str)), - m_dataInput(m_byteArray.get(), m_byteArray.size()) { - // NOP - } + m_dataInput(m_byteArray.get(), m_byteArray.size()) {} void read(uint8_t *value) { *value = m_dataInput.read(); } @@ -258,8 +256,16 @@ TEST_F(DataInputTest, CanReadUTFWithAnUTFStringInput) { DataOutputInternal stream; stream.writeUTF(expectedString); - DataInputUnderTest dataInput(stream.getBufferCopy(), - stream.getBufferLength()); + size_t outputBufferSize; + auto outputBuffer = stream.getBuffer(&outputBufferSize); + + auto buffer = std::unique_ptr{ + reinterpret_cast( + std::malloc(sizeof(uint8_t) * outputBufferSize)), + std::free}; + std::memcpy(buffer.get(), outputBuffer, outputBufferSize); + + DataInputUnderTest dataInput(buffer.get(), stream.getBufferLength()); auto actualString = dataInput.readUTF(); EXPECT_TRUE(expectedString == actualString); @@ -315,7 +321,7 @@ TEST_F(DataInputTest, TestReadBoolean) { EXPECT_EQ(true, value) << "Correct bool"; } -TEST_F(DataInputTest, TestReadUint8_tBytesOnly) { +TEST_F(DataInputTest, TestReadUint8BytesOnly) { TestDataInput dataInput("BABEFACE"); uint8_t buffer[4]; ::memset(buffer, 0U, 4 * sizeof(uint8_t)); @@ -326,7 +332,7 @@ TEST_F(DataInputTest, TestReadUint8_tBytesOnly) { EXPECT_EQ(static_cast(206U), buffer[3]) << "Correct third uint8_t"; } -TEST_F(DataInputTest, TestReadInt8_tBytesOnly) { +TEST_F(DataInputTest, TestReadInt8BytesOnly) { TestDataInput dataInput("DEADBEEF"); int8_t buffer[4]; ::memset(buffer, 0, 4 * sizeof(int8_t)); @@ -337,32 +343,38 @@ TEST_F(DataInputTest, TestReadInt8_tBytesOnly) { EXPECT_EQ(static_cast(-17), buffer[3]) << "Correct third int8_t"; } -TEST_F(DataInputTest, TestReadUint8_tBytes) { +TEST_F(DataInputTest, 
TestReadUint8Bytes) { TestDataInput dataInput("04BABEFACE"); uint8_t *buffer = nullptr; int32_t len = 0; dataInput.readBytes(&buffer, &len); - EXPECT_NE(static_cast(nullptr), buffer) << "Non-null buffer"; + auto bufferGuard = std::unique_ptr{buffer}; + ASSERT_NE(nullptr, buffer) << "Non-null buffer"; ASSERT_EQ(4, len) << "Correct length"; - EXPECT_EQ(static_cast(186U), buffer[0]) << "Correct zeroth uint8_t"; - EXPECT_EQ(static_cast(190U), buffer[1]) << "Correct first uint8_t"; - EXPECT_EQ(static_cast(250U), buffer[2]) << "Correct second uint8_t"; - EXPECT_EQ(static_cast(206U), buffer[3]) << "Correct third uint8_t"; - _GEODE_SAFE_DELETE_ARRAY(buffer); + if (buffer) { + EXPECT_EQ(static_cast(186U), buffer[0]) + << "Correct zeroth uint8_t"; + EXPECT_EQ(static_cast(190U), buffer[1]) << "Correct first uint8_t"; + EXPECT_EQ(static_cast(250U), buffer[2]) + << "Correct second uint8_t"; + EXPECT_EQ(static_cast(206U), buffer[3]) << "Correct third uint8_t"; + } } -TEST_F(DataInputTest, TestReadInt8_tBytes) { +TEST_F(DataInputTest, TestReadInt8Bytes) { TestDataInput dataInput("04DEADBEEF"); int8_t *buffer = nullptr; int32_t len = 0; dataInput.readBytes(&buffer, &len); - EXPECT_NE(static_cast(nullptr), buffer) << "Non-null buffer"; + auto bufferGuard = std::unique_ptr{buffer}; + ASSERT_NE(nullptr, buffer) << "Non-null buffer"; ASSERT_EQ(4, len) << "Correct length"; - EXPECT_EQ(static_cast(-34), buffer[0]) << "Correct zeroth int8_t"; - EXPECT_EQ(static_cast(-83), buffer[1]) << "Correct first int8_t"; - EXPECT_EQ(static_cast(-66), buffer[2]) << "Correct second int8_t"; - EXPECT_EQ(static_cast(-17), buffer[3]) << "Correct third int8_t"; - _GEODE_SAFE_DELETE_ARRAY(buffer); + if (buffer) { + EXPECT_EQ(static_cast(-34), buffer[0]) << "Correct zeroth int8_t"; + EXPECT_EQ(static_cast(-83), buffer[1]) << "Correct first int8_t"; + EXPECT_EQ(static_cast(-66), buffer[2]) << "Correct second int8_t"; + EXPECT_EQ(static_cast(-17), buffer[3]) << "Correct third int8_t"; + } } 
TEST_F(DataInputTest, TestReadIntUint16) { @@ -590,22 +602,25 @@ TEST_F(DataInputTest, TestReadArrayOfByteArrays) { int32_t *elementLength = nullptr; dataInput.readArrayOfByteArrays(&arrayOfByteArrays, arrayLength, &elementLength); - EXPECT_NE(static_cast(nullptr), arrayOfByteArrays) - << "Non-null array of byte arrays"; + auto arrayOfByteArraysGuard = + std::unique_ptr { arrayOfByteArrays }; + auto elementLengthGuard = std::unique_ptr{elementLength}; + + ASSERT_NE(nullptr, arrayOfByteArrays) << "Non-null array of byte arrays"; ASSERT_EQ(1, arrayLength) << "Correct array length"; - EXPECT_NE(static_cast(nullptr), arrayOfByteArrays[0]) - << "Non-null first byte array"; - ASSERT_EQ(4, elementLength[0]) << "Correct length"; - EXPECT_EQ(static_cast(-34), arrayOfByteArrays[0][0]) - << "Correct zeroth int8_t"; - EXPECT_EQ(static_cast(-83), arrayOfByteArrays[0][1]) - << "Correct first int8_t"; - EXPECT_EQ(static_cast(-66), arrayOfByteArrays[0][2]) - << "Correct second int8_t"; - EXPECT_EQ(static_cast(-17), arrayOfByteArrays[0][3]) - << "Correct third int8_t"; - _GEODE_SAFE_DELETE_ARRAY(elementLength); - _GEODE_SAFE_DELETE_ARRAY(arrayOfByteArrays); + if (arrayOfByteArrays) { + EXPECT_NE(static_cast(nullptr), arrayOfByteArrays[0]) + << "Non-null first byte array"; + ASSERT_EQ(4, elementLength[0]) << "Correct length"; + EXPECT_EQ(static_cast(-34), arrayOfByteArrays[0][0]) + << "Correct zeroth int8_t"; + EXPECT_EQ(static_cast(-83), arrayOfByteArrays[0][1]) + << "Correct first int8_t"; + EXPECT_EQ(static_cast(-66), arrayOfByteArrays[0][2]) + << "Correct second int8_t"; + EXPECT_EQ(static_cast(-17), arrayOfByteArrays[0][3]) + << "Correct third int8_t"; + } } TEST_F(DataInputTest, TestGetBytesRead) { diff --git a/cppcache/test/DataOutputTest.cpp b/cppcache/test/DataOutputTest.cpp index f3017d5b01..29e3982eab 100644 --- a/cppcache/test/DataOutputTest.cpp +++ b/cppcache/test/DataOutputTest.cpp @@ -71,29 +71,16 @@ class TestDataOutput : public DataOutputInternal { class 
DataOutputTest : public ::testing::Test, public ByteArrayFixture { public: - DataOutputTest() : m_mersennesTwister(m_randomDevice()) { - // NOP - } - - virtual ~DataOutputTest() { - // NOP - } + DataOutputTest() : randomEngine_(randomDevice_()) {} + ~DataOutputTest() noexcept override = default; protected: - std::random_device m_randomDevice; - std::mt19937 m_mersennesTwister; + std::random_device randomDevice_; + std::default_random_engine randomEngine_; + std::uniform_int_distribution random_; int32_t getRandomSequenceNumber() { - // One would normally just use std::uniform_int_distribution but gcc 4.4.7 - // is lacking. - const std::mt19937::result_type upperLimit = - static_cast( - std::numeric_limits::max()); - std::mt19937::result_type result; - while (upperLimit < (result = m_mersennesTwister())) { - // Try again. - } - return static_cast(result); + return static_cast(random_(randomEngine_)); } }; diff --git a/cppcache/test/PdxInstanceImplTest.cpp b/cppcache/test/PdxInstanceImplTest.cpp index 604b46f64e..d54f3e7462 100644 --- a/cppcache/test/PdxInstanceImplTest.cpp +++ b/cppcache/test/PdxInstanceImplTest.cpp @@ -51,8 +51,8 @@ using apache::geode::statistics::StatisticsFactory; // TEST(PdxInstanceImplTest, updatePdxStream) { auto properties = std::make_shared(); - CacheFactory cacheFactory; - auto cache = cacheFactory.create(); + properties->insert("log-level", "none"); + auto cache = CacheFactory{}.set("log-level", "none").create(); CacheImpl cacheImpl(&cache, properties, true, false, nullptr); auto buffer = std::vector(__1M__, 0xcc); auto len = static_cast(buffer.size()); diff --git a/cppcache/test/SerializableCreateTests.cpp b/cppcache/test/SerializableCreateTests.cpp index aa171683b9..fcf241e768 100644 --- a/cppcache/test/SerializableCreateTests.cpp +++ b/cppcache/test/SerializableCreateTests.cpp @@ -36,7 +36,7 @@ using apache::geode::client::CacheableInt64; using apache::geode::client::CacheableString; using apache::geode::client::Serializable; 
-TEST(SerializableCreateTests, forArrayOf_constchar) { +TEST(SerializableCreateTests, forArrayOfConstChar) { const auto serializable = Serializable::create("test"); ASSERT_TRUE(nullptr != serializable); auto&& cacheableString = @@ -45,7 +45,7 @@ TEST(SerializableCreateTests, forArrayOf_constchar) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(SerializableCreateTests, forArrayOf_char) { +TEST(SerializableCreateTests, forArrayOfChar) { char* test = new char[5]{'t', 'e', 's', 't', '\0'}; const auto serializable = Serializable::create(test); ASSERT_TRUE(nullptr != serializable); @@ -55,7 +55,7 @@ TEST(SerializableCreateTests, forArrayOf_char) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(SerializableCreateTests, forArrayOf_char16_t) { +TEST(SerializableCreateTests, forArrayOfChar16) { const auto serializable = Serializable::create(u"test"); ASSERT_TRUE(nullptr != serializable); auto&& cacheableString = @@ -64,7 +64,7 @@ TEST(SerializableCreateTests, forArrayOf_char16_t) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(SerializableCreateTests, forArrayOf_char32_t) { +TEST(SerializableCreateTests, forArrayOfChar32) { const auto serializable = Serializable::create(U"test"); ASSERT_TRUE(nullptr != serializable); auto&& cacheableString = @@ -73,7 +73,7 @@ TEST(SerializableCreateTests, forArrayOf_char32_t) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(SerializableCreateTests, forArrayOf_wchar_t) { +TEST(SerializableCreateTests, forArrayOfWchar) { const auto serializable = Serializable::create(L"test"); ASSERT_TRUE(nullptr != serializable); auto&& cacheableString = @@ -82,7 +82,7 @@ TEST(SerializableCreateTests, forArrayOf_wchar_t) { ASSERT_EQ(cacheableString->value(), "test"); } -TEST(SerializableCreateTests, for_int8_t) { +TEST(SerializableCreateTests, forInt8) { const auto serializable = Serializable::create(static_cast(1)); ASSERT_TRUE(nullptr != serializable); auto&& cacheableByte = std::dynamic_pointer_cast(serializable); @@ -90,7 +90,7 
@@ TEST(SerializableCreateTests, for_int8_t) { ASSERT_EQ(cacheableByte->value(), 1); } -TEST(SerializableCreateTests, for_int16_t) { +TEST(SerializableCreateTests, forInt16) { const auto serializable = Serializable::create(static_cast(1)); ASSERT_TRUE(nullptr != serializable); auto&& cacheableInt16 = @@ -99,7 +99,7 @@ TEST(SerializableCreateTests, for_int16_t) { ASSERT_EQ(cacheableInt16->value(), 1); } -TEST(SerializableCreateTests, for_int32_t) { +TEST(SerializableCreateTests, forInt32) { const auto serializable = Serializable::create(static_cast(1)); ASSERT_TRUE(nullptr != serializable); auto&& cacheableInt32 = @@ -108,7 +108,7 @@ TEST(SerializableCreateTests, for_int32_t) { ASSERT_EQ(cacheableInt32->value(), 1); } -TEST(SerializableCreateTests, for_int64_t) { +TEST(SerializableCreateTests, forInt64) { const auto serializable = Serializable::create(static_cast(1)); ASSERT_TRUE(nullptr != serializable); auto&& cacheableInt64 = @@ -117,7 +117,7 @@ TEST(SerializableCreateTests, for_int64_t) { ASSERT_EQ(cacheableInt64->value(), 1); } -TEST(SerializableCreateTests, for_char16_t) { +TEST(SerializableCreateTests, forCr16) { const auto serializable = Serializable::create(u'a'); ASSERT_TRUE(nullptr != serializable); auto&& cacheableCharacter = @@ -126,7 +126,7 @@ TEST(SerializableCreateTests, for_char16_t) { ASSERT_EQ(cacheableCharacter->value(), u'a'); } -TEST(SerializableCreateTests, for_float) { +TEST(SerializableCreateTests, forFloat) { const auto serializable = Serializable::create(1.1f); ASSERT_TRUE(nullptr != serializable); auto&& cacheableFloat = @@ -135,7 +135,7 @@ TEST(SerializableCreateTests, for_float) { ASSERT_EQ(cacheableFloat->value(), 1.1f); } -TEST(SerializableCreateTests, for_double) { +TEST(SerializableCreateTests, forDouble) { const auto serializable = Serializable::create(1.1); ASSERT_TRUE(nullptr != serializable); auto&& cacheableDouble = @@ -144,7 +144,7 @@ TEST(SerializableCreateTests, for_double) { ASSERT_EQ(cacheableDouble->value(), 1.1); } 
-TEST(SerializableCreateTests, for_bool) { +TEST(SerializableCreateTests, forBool) { const auto serializable = Serializable::create(true); ASSERT_TRUE(nullptr != serializable); auto&& cacheableBoolean = @@ -153,7 +153,7 @@ TEST(SerializableCreateTests, for_bool) { ASSERT_EQ(cacheableBoolean->value(), true); } -TEST(SerializableCreateTests, for_timepoint) { +TEST(SerializableCreateTests, forTimepoint) { auto time = std::chrono::system_clock::now(); const auto serializable = Serializable::create(time); ASSERT_TRUE(nullptr != serializable); diff --git a/cppcache/test/TcrMessageTest.cpp b/cppcache/test/TcrMessageTest.cpp index cfe200411d..1f811bd165 100644 --- a/cppcache/test/TcrMessageTest.cpp +++ b/cppcache/test/TcrMessageTest.cpp @@ -78,7 +78,7 @@ TEST_F(TcrMessageTest, intializeDefaultConstructor) { EXPECT_EQ(TcrMessage::INVALID, message.getMessageType()); } -TEST_F(TcrMessageTest, testConstructor1MessageDataContentWithDESTROY_REGION) { +TEST_F(TcrMessageTest, testConstructor1MessageDataContentWithDestoryRegion) { using apache::geode::client::TcrMessageDestroyRegion; const Region *region = nullptr; @@ -99,7 +99,7 @@ TEST_F(TcrMessageTest, testConstructor1MessageDataContentWithDESTROY_REGION) { message); } -TEST_F(TcrMessageTest, testConstructor1MessageDataContentWithCLEAR_REGION) { +TEST_F(TcrMessageTest, testConstructor1MessageDataContentWithClearRegion) { using apache::geode::client::TcrMessageClearRegion; const Region *region = nullptr; @@ -135,7 +135,7 @@ TEST_F(TcrMessageTest, testQueryConstructorMessageDataContent) { message); } -TEST_F(TcrMessageTest, testQueryConstructorWithQUERY) { +TEST_F(TcrMessageTest, testQueryConstructorWithQuery) { using apache::geode::client::TcrMessageQuery; std::chrono::milliseconds messageResponseTimeout{1000}; @@ -152,7 +152,7 @@ TEST_F(TcrMessageTest, testQueryConstructorWithQUERY) { message); } -TEST_F(TcrMessageTest, testQueryConstructorWithSTOPCQ_MSG_TYPE) { +TEST_F(TcrMessageTest, testQueryConstructorWithStopCq) { using 
apache::geode::client::TcrMessageStopCQ; std::chrono::milliseconds messageResponseTimeout{1000}; @@ -169,7 +169,7 @@ TEST_F(TcrMessageTest, testQueryConstructorWithSTOPCQ_MSG_TYPE) { message); } -TEST_F(TcrMessageTest, testQueryConstructorWithCLOSECQ_MSG_TYPE) { +TEST_F(TcrMessageTest, testQueryConstructorWithCloseCq) { using apache::geode::client::TcrMessageCloseCQ; std::chrono::milliseconds messageResponseTimeout{1000}; @@ -187,7 +187,7 @@ TEST_F(TcrMessageTest, testQueryConstructorWithCLOSECQ_MSG_TYPE) { } TEST_F(TcrMessageTest, - testParameterizedQueryConstructorWithQUERY_WITH_PARAMETERS) { + testParameterizedQueryConstructorWithQueryWithParameters) { using apache::geode::client::TcrMessageQueryWithParameters; std::chrono::milliseconds messageResponseTimeout{1000}; @@ -207,7 +207,7 @@ TEST_F(TcrMessageTest, message); } -TEST_F(TcrMessageTest, testConstructorWithCONTAINS_KEY) { +TEST_F(TcrMessageTest, testConstructorWithContainsKey) { using apache::geode::client::TcrMessageContainsKey; TcrMessageContainsKey message( @@ -226,7 +226,7 @@ TEST_F(TcrMessageTest, testConstructorWithCONTAINS_KEY) { message); } -TEST_F(TcrMessageTest, testConstructorWithGETDURABLECQS_MSG_TYPE) { +TEST_F(TcrMessageTest, testConstructorWithGetDurableCqs) { using apache::geode::client::TcrMessageGetDurableCqs; TcrMessageGetDurableCqs message(new DataOutputUnderTest(), @@ -256,7 +256,7 @@ TEST_F(TcrMessageTest, testConstructor2WithREQUEST) { message); } -TEST_F(TcrMessageTest, testConstructor2WithDESTROY) { +TEST_F(TcrMessageTest, testConstructor2WithDestroy) { using apache::geode::client::TcrMessageDestroy; TcrMessageDestroy message( @@ -275,7 +275,7 @@ TEST_F(TcrMessageTest, testConstructor2WithDESTROY) { message); } -TEST_F(TcrMessageTest, testConstructor2WithINVALIDATE) { +TEST_F(TcrMessageTest, testConstructor2WithInvalidate) { using apache::geode::client::TcrMessageInvalidate; TcrMessageInvalidate message( @@ -295,7 +295,7 @@ TEST_F(TcrMessageTest, testConstructor2WithINVALIDATE) { 
message); } -TEST_F(TcrMessageTest, testConstructor3WithPUT) { +TEST_F(TcrMessageTest, testConstructor3WithPut) { using apache::geode::client::TcrMessagePut; TcrMessagePut message( @@ -373,7 +373,7 @@ TEST_F(TcrMessageTest, TcrMessageRegisterInterestListWithManyKeys) { message); } -TEST_F(TcrMessageTest, testConstructor5WithUNREGISTER_INTERST_LIST) { +TEST_F(TcrMessageTest, testConstructor5WithUnregisterInteresetList) { using apache::geode::client::TcrMessageUnregisterInterestList; std::vector> keys; @@ -394,7 +394,7 @@ TEST_F(TcrMessageTest, testConstructor5WithUNREGISTER_INTERST_LIST) { message); } -TEST_F(TcrMessageTest, testConstructorGET_FUNCTION_ATTRIBUTES) { +TEST_F(TcrMessageTest, testConstructorGetFunctionAttributes) { using apache::geode::client::TcrMessageGetFunctionAttributes; TcrMessageGetFunctionAttributes message( @@ -408,7 +408,7 @@ TEST_F(TcrMessageTest, testConstructorGET_FUNCTION_ATTRIBUTES) { message); } -TEST_F(TcrMessageTest, testConstructorKEY_SET) { +TEST_F(TcrMessageTest, testConstructorKeySet) { using apache::geode::client::TcrMessageKeySet; TcrMessageKeySet message(new DataOutputUnderTest(), @@ -423,7 +423,7 @@ TEST_F(TcrMessageTest, testConstructorKEY_SET) { message); } -TEST_F(TcrMessageTest, testConstructor6WithCREATE_REGION) { +TEST_F(TcrMessageTest, testConstructor6WithCreateRegion) { using apache::geode::client::TcrMessageCreateRegion; TcrMessageCreateRegion message(new DataOutputUnderTest(), @@ -440,7 +440,7 @@ TEST_F(TcrMessageTest, testConstructor6WithCREATE_REGION) { message); } -TEST_F(TcrMessageTest, testConstructor6WithREGISTER_INTEREST) { +TEST_F(TcrMessageTest, testConstructor6WithRegisterInterest) { using apache::geode::client::TcrMessageRegisterInterest; TcrMessageRegisterInterest message( @@ -461,7 +461,7 @@ TEST_F(TcrMessageTest, testConstructor6WithREGISTER_INTEREST) { message); } -TEST_F(TcrMessageTest, testConstructor6WithUNREGISTER_INTEREST) { +TEST_F(TcrMessageTest, testConstructor6WithUnregisterInterest) { using 
apache::geode::client::TcrMessageUnregisterInterest; TcrMessageUnregisterInterest message( @@ -481,7 +481,7 @@ TEST_F(TcrMessageTest, testConstructor6WithUNREGISTER_INTEREST) { message); } -TEST_F(TcrMessageTest, testConstructorGET_PDX_TYPE_BY_ID) { +TEST_F(TcrMessageTest, testConstructorGetPdxTypeById) { using apache::geode::client::TcrMessageGetPdxTypeById; TcrMessageGetPdxTypeById message(new DataOutputUnderTest(), 42, @@ -493,7 +493,7 @@ TEST_F(TcrMessageTest, testConstructorGET_PDX_TYPE_BY_ID) { message); } -TEST_F(TcrMessageTest, testConstructorGET_PDX_ENUM_BY_ID) { +TEST_F(TcrMessageTest, testConstructorGetPdxEnumById) { using apache::geode::client::TcrMessageGetPdxEnumById; TcrMessageGetPdxEnumById message(new DataOutputUnderTest(), 42, @@ -505,7 +505,7 @@ TEST_F(TcrMessageTest, testConstructorGET_PDX_ENUM_BY_ID) { message); } -TEST_F(TcrMessageTest, testConstructorGET_PDX_ID_FOR_TYPE) { +TEST_F(TcrMessageTest, testConstructorGetPdxIdForType) { using apache::geode::client::TcrMessageGetPdxIdForType; std::shared_ptr myPtr(CacheableString::createDeserializable()); @@ -518,7 +518,7 @@ TEST_F(TcrMessageTest, testConstructorGET_PDX_ID_FOR_TYPE) { message); } -TEST_F(TcrMessageTest, testConstructorADD_PDX_TYPE) { +TEST_F(TcrMessageTest, testConstructorAddPdxType) { using apache::geode::client::TcrMessageAddPdxType; std::shared_ptr myPtr(CacheableString::createDeserializable()); @@ -532,7 +532,7 @@ TEST_F(TcrMessageTest, testConstructorADD_PDX_TYPE) { message); } -TEST_F(TcrMessageTest, testConstructorGET_PDX_ID_FOR_ENUM) { +TEST_F(TcrMessageTest, testConstructorGetPdxIdForEnum) { using apache::geode::client::TcrMessageGetPdxIdForEnum; TcrMessageGetPdxIdForEnum message( @@ -545,7 +545,7 @@ TEST_F(TcrMessageTest, testConstructorGET_PDX_ID_FOR_ENUM) { EXPECT_MESSAGE_EQ("000000610000000600000001FFFFFFFF00000000010129", message); } -TEST_F(TcrMessageTest, testConstructorADD_PDX_ENUM) { +TEST_F(TcrMessageTest, testConstructorAddPdxEnum) { using 
apache::geode::client::TcrMessageAddPdxEnum; std::shared_ptr myPtr(CacheableString::createDeserializable()); @@ -574,7 +574,7 @@ TEST_F(TcrMessageTest, testConstructorEventId) { EXPECT_MESSAGE_EQ("000000440000000600000001FFFFFFFF00000000010129", message); } -TEST_F(TcrMessageTest, testConstructorREMOVE_USER_AUTH) { +TEST_F(TcrMessageTest, testConstructorRemoveUserAuth) { using apache::geode::client::TcrMessageRemoveUserAuth; TcrMessageRemoveUserAuth message(new DataOutputUnderTest(), true, @@ -592,7 +592,7 @@ TEST_F(TcrMessageTest, testConstructorREMOVE_USER_AUTH) { EXPECT_MESSAGE_EQ("0000004E0000000600000001FFFFFFFF00000000010000", message2); } -TEST_F(TcrMessageTest, testConstructorUSER_CREDENTIAL_MESSAGE) { +TEST_F(TcrMessageTest, testConstructorUserCredential) { using apache::geode::client::Properties; using apache::geode::client::TcrMessageUserCredential; @@ -607,7 +607,7 @@ TEST_F(TcrMessageTest, testConstructorUSER_CREDENTIAL_MESSAGE) { EXPECT_MESSAGE_EQ("", message); } -TEST_F(TcrMessageTest, testConstructorGET_CLIENT_PARTITION_ATTRIBUTES) { +TEST_F(TcrMessageTest, testConstructorGetClientPartitionAttributes) { using apache::geode::client::TcrMessageGetClientPartitionAttributes; TcrMessageGetClientPartitionAttributes message(new DataOutputUnderTest(), @@ -622,7 +622,7 @@ TEST_F(TcrMessageTest, testConstructorGET_CLIENT_PARTITION_ATTRIBUTES) { message); } -TEST_F(TcrMessageTest, testConstructorGET_CLIENT_PR_METADATA) { +TEST_F(TcrMessageTest, testConstructorGetClientPrMetadata) { using apache::geode::client::TcrMessageGetClientPrMetadata; TcrMessageGetClientPrMetadata message(new DataOutputUnderTest(), @@ -635,7 +635,7 @@ TEST_F(TcrMessageTest, testConstructorGET_CLIENT_PR_METADATA) { "6F6E50524D455441", message); } -TEST_F(TcrMessageTest, testConstructorSIZE) { +TEST_F(TcrMessageTest, testConstructorSize) { using apache::geode::client::TcrMessageSize; TcrMessageSize message(new DataOutputUnderTest(), "testClientRegionSIZE"); @@ -648,7 +648,7 @@ 
TEST_F(TcrMessageTest, testConstructorSIZE) { message); } -TEST_F(TcrMessageTest, testConstructorEXECUTE_REGION_FUNCTION_SINGLE_HOP) { +TEST_F(TcrMessageTest, testConstructorExecuteRegionFunctionSingleHop) { using apache::geode::client::TcrMessageExecuteRegionFunctionSingleHop; const Region *region = nullptr; @@ -676,7 +676,7 @@ TEST_F(TcrMessageTest, testConstructorEXECUTE_REGION_FUNCTION_SINGLE_HOP) { EXPECT_TRUE(message.hasResult()); } -TEST_F(TcrMessageTest, testConstructorEXECUTE_REGION_FUNCTION) { +TEST_F(TcrMessageTest, testConstructorExecuteRegionFunction) { using apache::geode::client::TcrMessageExecuteRegionFunction; const Region *region = nullptr; @@ -705,7 +705,7 @@ TEST_F(TcrMessageTest, testConstructorEXECUTE_REGION_FUNCTION) { EXPECT_TRUE(testMessage.hasResult()); } -TEST_F(TcrMessageTest, DISABLED_testConstructorEXECUTE_FUNCTION) { +TEST_F(TcrMessageTest, DISABLED_testConstructorExecuteFunction) { using apache::geode::client::TcrMessageExecuteFunction; std::shared_ptr myCacheablePtr( @@ -725,7 +725,7 @@ TEST_F(TcrMessageTest, DISABLED_testConstructorEXECUTE_FUNCTION) { testMessage); } -TEST_F(TcrMessageTest, testConstructorEXECUTECQ_MSG_TYPE) { +TEST_F(TcrMessageTest, testConstructorExecuteCq) { using apache::geode::client::TcrMessageExecuteCq; std::shared_ptr myCacheablePtr( @@ -744,7 +744,7 @@ TEST_F(TcrMessageTest, testConstructorEXECUTECQ_MSG_TYPE) { testMessage); } -TEST_F(TcrMessageTest, testConstructorWithGinormousQueryEXECUTECQ_MSG_TYPE) { +TEST_F(TcrMessageTest, testConstructorWithGinormousQueryExecuteCq) { using apache::geode::client::TcrMessageExecuteCq; std::shared_ptr myCacheablePtr( @@ -778,7 +778,7 @@ TEST_F(TcrMessageTest, testConstructorWithGinormousQueryEXECUTECQ_MSG_TYPE) { testMessage); } -TEST_F(TcrMessageTest, testConstructorEXECUTECQ_WITH_IR_MSG_TYPE) { +TEST_F(TcrMessageTest, testConstructorExecuteCqWithIr) { using apache::geode::client::TcrMessageExecuteCqWithIr; std::shared_ptr myCacheablePtr( @@ -798,7 +798,7 @@ 
TEST_F(TcrMessageTest, testConstructorEXECUTECQ_WITH_IR_MSG_TYPE) { testMessage); } -TEST_F(TcrMessageTest, testConstructorPING) { +TEST_F(TcrMessageTest, testConstructorPing) { using apache::geode::client::TcrMessagePing; std::shared_ptr myCacheablePtr( @@ -811,7 +811,7 @@ TEST_F(TcrMessageTest, testConstructorPING) { EXPECT_MESSAGE_EQ("000000050000000000000000FFFFFFFF00", testMessage); } -TEST_F(TcrMessageTest, testConstructorCLOSE_CONNECTION) { +TEST_F(TcrMessageTest, testConstructorCloseConnection) { using apache::geode::client::TcrMessageCloseConnection; std::shared_ptr myCacheablePtr( diff --git a/cppcache/test/ThreadPoolTest.cpp b/cppcache/test/ThreadPoolTest.cpp index f62bf61cfc..3f0bb24b51 100644 --- a/cppcache/test/ThreadPoolTest.cpp +++ b/cppcache/test/ThreadPoolTest.cpp @@ -33,7 +33,7 @@ class TestCallable : public Callable { public: TestCallable() : called_(0) {} - void call() { + void call() override { std::lock_guard lock(mutex_); called_++; condition_.notify_all(); diff --git a/cppcache/test/util/chrono/durationTest.cpp b/cppcache/test/util/chrono/durationTest.cpp index 80e766e138..26f95c91f2 100644 --- a/cppcache/test/util/chrono/durationTest.cpp +++ b/cppcache/test/util/chrono/durationTest.cpp @@ -29,7 +29,7 @@ using apache::geode::internal::chrono::duration::from_string; using apache::geode::internal::chrono::duration::to_string; using apache::geode::util::chrono::duration::assert_bounds; -TEST(util_chrono_durationTest, ceil) { +TEST(durationTest, ceil) { EXPECT_EQ(std::chrono::seconds(1), _ceil(std::chrono::milliseconds(1))); EXPECT_EQ(std::chrono::milliseconds(1), @@ -38,7 +38,7 @@ TEST(util_chrono_durationTest, ceil) { _ceil(std::chrono::milliseconds(1))); } -TEST(util_chrono_durationTest, to_string) { +TEST(durationTest, toString) { EXPECT_EQ("42h", to_string(std::chrono::hours(42))); EXPECT_EQ("42min", to_string(std::chrono::minutes(42))); EXPECT_EQ("42s", to_string(std::chrono::seconds(42))); @@ -52,7 +52,7 @@ TEST(util_chrono_durationTest, 
to_string) { to_string(std::chrono::duration>(100))); } -TEST(util_chrono_durationTest, from_string) { +TEST(durationTest, fromString) { EXPECT_EQ(std::chrono::hours(42), from_string("42h")); EXPECT_EQ(std::chrono::minutes(42), from_string("42min")); EXPECT_EQ(std::chrono::seconds(42), from_string("42s")); @@ -63,7 +63,7 @@ TEST(util_chrono_durationTest, from_string) { EXPECT_EQ(std::chrono::nanoseconds(-42), from_string("-42ns")); } -TEST(util_chrono_durationTest, from_stringWithCeil) { +TEST(durationTest, fromStringWithCeil) { EXPECT_EQ(std::chrono::hours(42), from_string("42h")); EXPECT_EQ(std::chrono::hours(1), from_string("42min")); EXPECT_EQ(std::chrono::minutes(1), from_string("42s")); @@ -76,11 +76,11 @@ TEST(util_chrono_durationTest, from_stringWithCeil) { from_string("2000ms")); } -TEST(util_chrono_durationTest, from_stringException) { +TEST(durationTest, fromStringException) { ASSERT_THROW(from_string("42"), std::invalid_argument); } -TEST(util_chrono_durationTest, assert_bounds) { +TEST(durationTest, assertBounds) { auto protocolTimeoutLimit = assert_bounds{}; ASSERT_NO_THROW(protocolTimeoutLimit(std::chrono::milliseconds(2147483647))); diff --git a/cppcache/test/util/functionalTests.cpp b/cppcache/test/util/functionalTests.cpp index 1cd483ca70..eee02695e4 100644 --- a/cppcache/test/util/functionalTests.cpp +++ b/cppcache/test/util/functionalTests.cpp @@ -23,7 +23,7 @@ using apache::geode::client::internal::geode_hash; -TEST(string, geode_hash) { +TEST(string, geodeHash) { auto&& hash = geode_hash{}; EXPECT_EQ(0, hash("")); diff --git a/cppcache/test/util/queueTest.cpp b/cppcache/test/util/queueTest.cpp index b5d68617aa..c31a3b3b2b 100644 --- a/cppcache/test/util/queueTest.cpp +++ b/cppcache/test/util/queueTest.cpp @@ -23,7 +23,7 @@ using apache::geode::client::queue::coalesce; -TEST(util_queueTest, coalesce) { +TEST(queueTest, coalesce) { auto queue = std::deque({1, 1, 1, 2, 3, 4}); coalesce(queue, 1); diff --git 
a/cppcache/test/util/synchronized_mapTest.cpp b/cppcache/test/util/synchronized_mapTest.cpp index 2a6f79c206..272b4b81f0 100644 --- a/cppcache/test/util/synchronized_mapTest.cpp +++ b/cppcache/test/util/synchronized_mapTest.cpp @@ -26,7 +26,7 @@ using apache::geode::client::synchronized_map; -TEST(synchronized_mapTest, emplaceLocks) { +TEST(SynchronizedMapTest, emplaceLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -48,7 +48,7 @@ TEST(synchronized_mapTest, emplaceLocks) { EXPECT_EQ(2, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, eraseKeyLocks) { +TEST(SynchronizedMapTest, eraseKeyLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -63,7 +63,7 @@ TEST(synchronized_mapTest, eraseKeyLocks) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, beginLocks) { +TEST(SynchronizedMapTest, beginLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -80,7 +80,7 @@ TEST(synchronized_mapTest, beginLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, beginConstLocks) { +TEST(SynchronizedMapTest, beginConstLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -98,7 +98,7 @@ TEST(synchronized_mapTest, beginConstLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, cbeginLocks) { +TEST(SynchronizedMapTest, cbeginLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -115,7 +115,7 @@ TEST(synchronized_mapTest, cbeginLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, endLocks) { +TEST(SynchronizedMapTest, endLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -133,7 +133,7 @@ TEST(synchronized_mapTest, endLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, endConsLocks) { +TEST(SynchronizedMapTest, endConsLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -152,7 +152,7 @@ TEST(synchronized_mapTest, endConsLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, cendLocks) { 
+TEST(SynchronizedMapTest, cendLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -170,7 +170,7 @@ TEST(synchronized_mapTest, cendLocks) { EXPECT_EQ(0, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, emptyLocks) { +TEST(SynchronizedMapTest, emptyLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -189,7 +189,7 @@ TEST(synchronized_mapTest, emptyLocks) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, sizeLocks) { +TEST(SynchronizedMapTest, sizeLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -208,7 +208,7 @@ TEST(synchronized_mapTest, sizeLocks) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, clearLocks) { +TEST(SynchronizedMapTest, clearLocks) { synchronized_map, TestableRecursiveMutex> map; @@ -223,7 +223,7 @@ TEST(synchronized_mapTest, clearLocks) { EXPECT_TRUE(map.empty()); } -TEST(synchronized_mapTest, findNotLocked) { +TEST(SynchronizedMapTest, findNotLocked) { synchronized_map, TestableRecursiveMutex> map; @@ -244,7 +244,7 @@ TEST(synchronized_mapTest, findNotLocked) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, findConstNotLocked) { +TEST(SynchronizedMapTest, findConstNotLocked) { synchronized_map, TestableRecursiveMutex> map; @@ -266,7 +266,7 @@ TEST(synchronized_mapTest, findConstNotLocked) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, iteratorNotLocked) { +TEST(SynchronizedMapTest, iteratorNotLocked) { synchronized_map, TestableRecursiveMutex> map; @@ -305,7 +305,7 @@ TEST(synchronized_mapTest, iteratorNotLocked) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, make_lockDefault) { +TEST(SynchronizedMapTest, makeLockDefault) { synchronized_map, TestableRecursiveMutex> map; @@ -321,7 +321,7 @@ TEST(synchronized_mapTest, make_lockDefault) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, make_lock_WithUniqueLock) { +TEST(SynchronizedMapTest, makeLockWithUniqueLock) { 
synchronized_map, TestableRecursiveMutex> map; @@ -337,7 +337,7 @@ TEST(synchronized_mapTest, make_lock_WithUniqueLock) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, make_lock_WithUniqueLockDefered) { +TEST(SynchronizedMapTest, makeLockWithUniqueLockDefered) { synchronized_map, TestableRecursiveMutex> map; @@ -357,7 +357,7 @@ TEST(synchronized_mapTest, make_lock_WithUniqueLockDefered) { EXPECT_EQ(1, map.mutex().unlock_count_); } -TEST(synchronized_mapTest, insertIteratorIteratorLocks) { +TEST(SynchronizedMapTest, insertIteratorIteratorLocks) { std::unordered_map source = {{"a", "A"}, {"b", "B"}}; diff --git a/cppcache/test/util/synchronized_setTest.cpp b/cppcache/test/util/synchronized_setTest.cpp index 98f8563210..005aa21f91 100644 --- a/cppcache/test/util/synchronized_setTest.cpp +++ b/cppcache/test/util/synchronized_setTest.cpp @@ -26,7 +26,7 @@ using apache::geode::client::synchronized_set; -TEST(synchronized_setTest, emplaceLocks) { +TEST(SynchronizedSetTest, emplaceLocks) { synchronized_set, TestableRecursiveMutex> set; auto result = set.emplace("a"); @@ -44,7 +44,7 @@ TEST(synchronized_setTest, emplaceLocks) { EXPECT_EQ(2, set.mutex().unlock_count_); } -TEST(synchronized_setTest, eraseKeyLocks) { +TEST(SynchronizedSetTest, eraseKeyLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -57,7 +57,7 @@ TEST(synchronized_setTest, eraseKeyLocks) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, beginLocks) { +TEST(SynchronizedSetTest, beginLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -72,7 +72,7 @@ TEST(synchronized_setTest, beginLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } -TEST(synchronized_setTest, beginConstLocks) { +TEST(SynchronizedSetTest, beginConstLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -88,7 +88,7 @@ TEST(synchronized_setTest, beginConstLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } 
-TEST(synchronized_setTest, cbeginLocks) { +TEST(SynchronizedSetTest, cbeginLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -103,7 +103,7 @@ TEST(synchronized_setTest, cbeginLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } -TEST(synchronized_setTest, endLocks) { +TEST(SynchronizedSetTest, endLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -119,7 +119,7 @@ TEST(synchronized_setTest, endLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } -TEST(synchronized_setTest, endConsLocks) { +TEST(SynchronizedSetTest, endConsLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -136,7 +136,7 @@ TEST(synchronized_setTest, endConsLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } -TEST(synchronized_setTest, cendLocks) { +TEST(SynchronizedSetTest, cendLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -152,7 +152,7 @@ TEST(synchronized_setTest, cendLocks) { EXPECT_EQ(0, set.mutex().unlock_count_); } -TEST(synchronized_setTest, emptyLocks) { +TEST(SynchronizedSetTest, emptyLocks) { synchronized_set, TestableRecursiveMutex> set; ASSERT_TRUE(set.empty()); @@ -169,7 +169,7 @@ TEST(synchronized_setTest, emptyLocks) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, sizeLocks) { +TEST(SynchronizedSetTest, sizeLocks) { synchronized_set, TestableRecursiveMutex> set; ASSERT_EQ(0, set.size()); @@ -186,7 +186,7 @@ TEST(synchronized_setTest, sizeLocks) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, clearLocks) { +TEST(SynchronizedSetTest, clearLocks) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -199,7 +199,7 @@ TEST(synchronized_setTest, clearLocks) { EXPECT_TRUE(set.empty()); } -TEST(synchronized_setTest, findNotLocked) { +TEST(SynchronizedSetTest, findNotLocked) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -218,7 +218,7 @@ TEST(synchronized_setTest, findNotLocked) { EXPECT_EQ(1, 
set.mutex().unlock_count_); } -TEST(synchronized_setTest, findConstNotLocked) { +TEST(SynchronizedSetTest, findConstNotLocked) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -238,7 +238,7 @@ TEST(synchronized_setTest, findConstNotLocked) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, iteratorNotLocked) { +TEST(SynchronizedSetTest, iteratorNotLocked) { synchronized_set, TestableRecursiveMutex> set; set.emplace("a"); @@ -275,7 +275,7 @@ TEST(synchronized_setTest, iteratorNotLocked) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, make_lockDefault) { +TEST(SynchronizedSetTest, makeLockDefault) { synchronized_set, TestableRecursiveMutex> set; { @@ -289,7 +289,7 @@ TEST(synchronized_setTest, make_lockDefault) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, make_lock_WithUniqueLock) { +TEST(SynchronizedSetTest, makeLockWithUniqueLock) { synchronized_set, TestableRecursiveMutex> set; { @@ -303,7 +303,7 @@ TEST(synchronized_setTest, make_lock_WithUniqueLock) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, make_lock_WithUniqueLockDefered) { +TEST(SynchronizedSetTest, makeLockWithUniqueLockDefered) { synchronized_set, TestableRecursiveMutex> set; { @@ -321,7 +321,7 @@ TEST(synchronized_setTest, make_lock_WithUniqueLockDefered) { EXPECT_EQ(1, set.mutex().unlock_count_); } -TEST(synchronized_setTest, insertIteratorIteratorLocks) { +TEST(SynchronizedSetTest, insertIteratorIteratorLocks) { std::unordered_set source = {"a", "b"}; synchronized_set, TestableRecursiveMutex> set; @@ -333,7 +333,7 @@ TEST(synchronized_setTest, insertIteratorIteratorLocks) { EXPECT_EQ(2, set.size()); } -TEST(synchronized_setTest, insertRvalue) { +TEST(SynchronizedSetTest, insertRvalue) { synchronized_set, TestableRecursiveMutex> set; set.insert("a"); @@ -343,7 +343,7 @@ TEST(synchronized_setTest, insertRvalue) { EXPECT_EQ(1, set.size()); } -TEST(synchronized_setTest, insertLvalue) 
{ +TEST(SynchronizedSetTest, insertLvalue) { synchronized_set, TestableRecursiveMutex> set; std::string value = "a"; @@ -354,7 +354,7 @@ TEST(synchronized_setTest, insertLvalue) { EXPECT_EQ(1, set.size()); } -TEST(synchronized_setTest, compilesWithStdSet) { +TEST(SynchronizedSetTest, compilesWithStdSet) { synchronized_set, TestableRecursiveMutex> set; auto result = set.emplace("a"); diff --git a/tests/cpp/.clang-tidy b/tests/cpp/.clang-tidy new file mode 100644 index 0000000000..6f51aee50b --- /dev/null +++ b/tests/cpp/.clang-tidy @@ -0,0 +1,4 @@ +--- +InheritParentConfig: true +Checks: '' +... diff --git a/tests/cpp/fwklib/FwkBBServer.hpp b/tests/cpp/fwklib/FwkBBServer.hpp index 0404f430ce..64bc845a30 100644 --- a/tests/cpp/fwklib/FwkBBServer.hpp +++ b/tests/cpp/fwklib/FwkBBServer.hpp @@ -151,9 +151,9 @@ class BBProcessor : public ServiceTask { BBProcessor(UDPMessageQueues* shared, FwkBBServer* server) : ServiceTask(shared), m_queues(shared), m_server(server) {} - virtual ~BBProcessor() {} + ~BBProcessor() noexcept override = default; - virtual int doTask() { + int doTask() override { while (*m_run) { try { UDPMessage* msg = m_queues->getInbound(); @@ -214,8 +214,8 @@ class BBProcessor : public ServiceTask { } return 0; } - virtual void initialize() {} - virtual void finalize() {} + void initialize() override {} + void finalize() override {} }; } // namespace testframework diff --git a/tests/cpp/fwklib/FwkLog.hpp b/tests/cpp/fwklib/FwkLog.hpp index cd34892310..1ceb490a05 100644 --- a/tests/cpp/fwklib/FwkLog.hpp +++ b/tests/cpp/fwklib/FwkLog.hpp @@ -51,72 +51,53 @@ const char* getNodeName(); #ifdef DEBUG #define FWKDEBUG(x) \ - { \ + do { \ std::ostringstream os; \ os << x; \ plog("Debug", os.str().c_str(), __FILE__, __LINE__); \ - } - -#define FWKSLEEP(x) \ - { ACE_OS::sleep(ACE_Time_Value(x, 0)); } + } while (0) #else #define FWKDEBUG(x) -#define FWKSLEEP(x) \ - { ACE_OS::sleep(ACE_Time_Value(x, 0)); } - #endif #define FWKINFO(x) \ - { \ + do { \ 
std::ostringstream os; \ os << x; \ apache::geode::client::testframework::plog("Info", os.str().c_str(), \ __FILE__, __LINE__); \ - } + } while (0) #define FWKWARN(x) \ - { \ + do { \ std::ostringstream os; \ os << x; \ apache::geode::client::testframework::plog("Warn", os.str().c_str(), \ __FILE__, __LINE__); \ - } + } while (0) #define FWKERROR(x) \ - { \ + do { \ std::ostringstream os; \ os << x; \ apache::geode::client::testframework::plog("Error", os.str().c_str(), \ __FILE__, __LINE__); \ - } + } while (0) #define FWKSEVERE(x) \ - { \ + do { \ std::ostringstream os; \ os << x; \ apache::geode::client::testframework::plog("Severe", os.str().c_str(), \ __FILE__, __LINE__); \ - } + } while (0) #define FWKEXCEPTION(x) \ - { \ + do { \ std::ostringstream os; \ os << x << " In file: " << __FILE__ << " at line: " << __LINE__; \ throw apache::geode::client::testframework::FwkException( \ os.str().c_str()); \ - } - -#define WAITFORDEBUGGER(x) \ - { \ - apache::geode::client::testframework::plog( \ - "Info", "Waiting for debugger ...", __FILE__, __LINE__); \ - for (int32_t i = x; i > 0; i--) ACE_OS::sleep(ACE_Time_Value(1, 0)); \ - } - -#define DUMPSTACK(x) \ - { \ - FWKSEVERE(x); \ - apache::geode::client::testframework::dumpStack(); \ - } + } while (0) } // namespace testframework } // namespace client diff --git a/tests/cpp/fwklib/Service.hpp b/tests/cpp/fwklib/Service.hpp index fea7d71d63..ecb0817f93 100644 --- a/tests/cpp/fwklib/Service.hpp +++ b/tests/cpp/fwklib/Service.hpp @@ -69,7 +69,7 @@ class Service : public ACE_Task_Base { ACE_Thread_Mutex m_Mutex; ACE_DLList m_TaskQueue; - int32_t svc(); + int32_t svc() override; inline void putQ(ServiceTask* task, uint32_t cnt = 1) { ACE_Guard guard(m_Mutex); @@ -85,7 +85,7 @@ class Service : public ACE_Task_Base { public: explicit Service(int32_t threadCnt); - inline ~Service() { stopThreads(); } + ~Service() override { stopThreads(); } int32_t runThreaded(ServiceTask* task, uint32_t threads); @@ -119,8 +119,7 @@ class 
SafeQueue { if (m_queue.size() == 0) { ACE_Time_Value until(2); until += ACE_OS::gettimeofday(); - ; - int32_t res = m_cond.wait(&until); + auto res = m_cond.wait(&until); if (res == -1) return nullptr; } return m_queue.delete_head(); diff --git a/tests/cpp/fwklib/TimeBomb.hpp b/tests/cpp/fwklib/TimeBomb.hpp index bfb9650638..7171db01e3 100644 --- a/tests/cpp/fwklib/TimeBomb.hpp +++ b/tests/cpp/fwklib/TimeBomb.hpp @@ -40,7 +40,7 @@ class TimeBomb : public ACE_Task_Base { int32_t m_exitCode; std::string m_msg; - int32_t svc(); + int32_t svc() override; public: inline TimeBomb(uint32_t seconds, int32_t exitCode, const std::string& msg) @@ -52,7 +52,7 @@ class TimeBomb : public ACE_Task_Base { inline TimeBomb() : m_stop(false), m_armed(false), m_seconds(0), m_exitCode(-1) {} - inline ~TimeBomb() { + ~TimeBomb() override { m_armed = false; m_stop = true; wait(); diff --git a/tests/cpp/fwklib/UDPIpc.hpp b/tests/cpp/fwklib/UDPIpc.hpp index b4201fe088..a8cd9be9fb 100644 --- a/tests/cpp/fwklib/UDPIpc.hpp +++ b/tests/cpp/fwklib/UDPIpc.hpp @@ -89,7 +89,7 @@ class UDPMessage : public IPCMessage { setCmd(msg.getCmd()); } - virtual ~UDPMessage() {} + ~UDPMessage() noexcept override = default; void setCmd(UdpCmds cmd) { m_hdr.cmd = cmd; } @@ -127,7 +127,7 @@ class UDPMessage : public IPCMessage { m_hdr.length = 0; } - virtual void clear() { + virtual void clear() override { clearHdr(); m_msg.clear(); } @@ -162,7 +162,7 @@ class UDPMessageQueues : public SharedTaskObject { public: explicit UDPMessageQueues(std::string label) : m_cntInbound(), m_cntOutbound(0), m_cntProcessed(0), m_label(label) {} - ~UDPMessageQueues() { + ~UDPMessageQueues() noexcept override { FWKINFO(m_label << "MessageQueues::Inbound count: " << m_cntInbound); FWKINFO(m_label << "MessageQueues::Processed count: " << m_cntProcessed); FWKINFO(m_label << "MessageQueues::Outbound count: " << m_cntOutbound); @@ -192,8 +192,8 @@ class UDPMessageQueues : public SharedTaskObject { return msg; } - virtual void 
initialize() {} - virtual void finalize() {} + void initialize() override {} + void finalize() override {} }; class Receiver : public ServiceTask { @@ -213,15 +213,15 @@ class Receiver : public ServiceTask { m_queues = dynamic_cast(m_shared); } - virtual ~Receiver() {} + virtual ~Receiver() noexcept override = default; bool isListener() { return (m_listener == ACE_Thread::self()); } - int32_t doTask(); + int32_t doTask() override; - void initialize(); + void initialize() override; - void finalize() { m_io->close(); } + void finalize() override { m_io->close(); } }; class STReceiver : public ServiceTask { @@ -237,13 +237,13 @@ class STReceiver : public ServiceTask { m_queues = dynamic_cast(m_shared); } - virtual ~STReceiver() {} + ~STReceiver() noexcept override = default; - int32_t doTask(); + int32_t doTask() override; - void initialize(); + void initialize() override; - void finalize() { m_io.close(); } + void finalize() override { m_io.close(); } }; class Processor : public ServiceTask { @@ -256,9 +256,9 @@ class Processor : public ServiceTask { m_queues = dynamic_cast(m_shared); } - virtual ~Processor() {} + ~Processor() noexcept override = default; - int32_t doTask() { + int32_t doTask() override { while (*m_run) { UDPMessage* msg = m_queues->getInbound(); if (msg) { @@ -267,8 +267,8 @@ class Processor : public ServiceTask { } return 0; } - void initialize() {} - void finalize() {} + void initialize() override {} + void finalize() override {} }; class Responder : public ServiceTask { @@ -284,14 +284,15 @@ class Responder : public ServiceTask { m_queues = dynamic_cast(m_shared); } - virtual ~Responder() {} + ~Responder() noexcept override = default; - int32_t doTask(); + int32_t doTask() override; - void initialize(); + void initialize() override; - void finalize() { m_io->close(); } + void finalize() override { m_io->close(); } }; + } // namespace testframework } // namespace client } // namespace geode diff --git 
a/tests/cpp/security/DummyCredentialGenerator.hpp b/tests/cpp/security/DummyCredentialGenerator.hpp index ac9e2588a5..a81ac58830 100644 --- a/tests/cpp/security/DummyCredentialGenerator.hpp +++ b/tests/cpp/security/DummyCredentialGenerator.hpp @@ -31,10 +31,7 @@ namespace security { class DummyCredentialGenerator : public CredentialGenerator { public: - DummyCredentialGenerator() : CredentialGenerator(ID_DUMMY, "DUMMY") { - ; - ; - } + DummyCredentialGenerator() : CredentialGenerator(ID_DUMMY, "DUMMY") {} std::string getInitArgs(std::string workingDir, bool userMode) override { std::string additionalArgs; diff --git a/tests/cpp/security/XmlAuthzCredentialGenerator.hpp b/tests/cpp/security/XmlAuthzCredentialGenerator.hpp index 5b120a29c5..c951689edd 100644 --- a/tests/cpp/security/XmlAuthzCredentialGenerator.hpp +++ b/tests/cpp/security/XmlAuthzCredentialGenerator.hpp @@ -84,7 +84,7 @@ class XmlAuthzCredentialGenerator { /* initialize random seed: */ srand(static_cast(time(nullptr))); } - virtual ~XmlAuthzCredentialGenerator() { ; } + virtual ~XmlAuthzCredentialGenerator() {} virtual void getAllowedCredentials(opCodeList& opCode, std::shared_ptr& prop, @@ -109,7 +109,7 @@ class XmlAuthzCredentialGenerator { case ID_DUMMY2: case ID_DUMMY3: break; - }; + } } catch (...) { reset(); @@ -147,7 +147,7 @@ class XmlAuthzCredentialGenerator { break; case NO_ROLE: /* UNNECESSARY role = role*/ break; - }; + } switch (m_id) { case ID_DUMMY: @@ -164,7 +164,7 @@ class XmlAuthzCredentialGenerator { case ID_DUMMY2: case ID_DUMMY3: break; - }; + } } catch (...) 
{ reset(); @@ -200,7 +200,7 @@ class XmlAuthzCredentialGenerator { case NO_ROLE: sprintf(userName, kPRiUsername, "user", randomValue(2)); break; - }; + } (*m_prop)->insert("security-username", userName); (*m_prop)->insert("security-password", userName); @@ -250,7 +250,7 @@ class XmlAuthzCredentialGenerator { sprintf(userName, kPRiUsername, userPrefix.c_str(), adminIndices[randomValue(adminIndSz)]); break; - }; + } FWKINFO("inserted " << validity << " username " << userName); return std::string(userName); } diff --git a/tests/cpp/testobject/VariousPdxTypes.cpp b/tests/cpp/testobject/VariousPdxTypes.cpp index a0a6007fa9..a3f9ede8e7 100644 --- a/tests/cpp/testobject/VariousPdxTypes.cpp +++ b/tests/cpp/testobject/VariousPdxTypes.cpp @@ -334,7 +334,6 @@ PdxTypes6::PdxTypes6() { m_s2 = "two"; bytes128 = std::vector(2); bytes128[0] = 0x34; - ; bytes128[1] = 0x64; m_i1 = 34324; m_i2 = 2144; From fe35dcf2930065575fb770ede8523d046e050bee Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Thu, 31 Dec 2020 16:03:01 -0800 Subject: [PATCH 142/155] Add clang-tools Dockerfile. --- ci/docker/clang-tools/Dockerfile | 49 ++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 ci/docker/clang-tools/Dockerfile diff --git a/ci/docker/clang-tools/Dockerfile b/ci/docker/clang-tools/Dockerfile new file mode 100644 index 0000000000..227c771142 --- /dev/null +++ b/ci/docker/clang-tools/Dockerfile @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:groovy +LABEL maintainer="Apache Geode " +LABEL description="Minimal image for building with clang toolset." + +RUN apt-get update \ + && apt-get install -y \ + bash \ + libssl-dev \ + patch \ + cmake \ + git \ + doxygen \ + openjdk-8-jdk-headless \ + jq \ + && rm -rf /var/lib/apt/lists/* + +ARG CLANG_VERSION=11 +RUN apt-get update \ + && apt-get install -y \ + clang-${CLANG_VERSION} \ + lld-${CLANG_VERSION} \ + clang-format-${CLANG_VERSION} \ + clang-tidy-${CLANG_VERSION} \ + && update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/lld lld /usr/bin/lld-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/run-clang-tidy run-clang-tidy /usr/bin/run-clang-tidy-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-${CLANG_VERSION} 10 \ + && update-alternatives --install /usr/bin/cc cc /usr/bin/clang 10 \ + && update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 10 \ + && update-alternatives --install /usr/bin/ld ld /usr/bin/lld 10 \ + && rm -rf /var/lib/apt/lists/* From cfd9b29493e8226f76bbbe83fd7e02c391ce1879 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 08:34:52 -0800 Subject: [PATCH 143/155] Add clang-tools Dockerfile. 
--- ci/base/pipeline.yml | 88 ++++++++++++++++++++++++++++++++ ci/docker/clang-tools/Dockerfile | 8 ++- ci/release/pipeline.yml | 5 ++ 3 files changed, 96 insertions(+), 5 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 52acb8dad0..dbc7cca368 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -32,18 +32,104 @@ resource_types: - #@ resource_type("gci-resource", "smgoller/gci-resource") - #@ resource_type("gcs-resource", "frodenas/gcs-resource") + - name: apache-directory-index + type: docker-image + source: + repository: mastertinner/apache-directory-index-resource resources: - #@ semver_resource("version", "1.14.0-build.0") - #@ gcr_image_resource("gcloud-image", "google.com/cloudsdktool/cloud-sdk", "alpine") - #@ project_gcr_image_resource("task-image", "geode-native-task") + - #@ project_gcr_image_resource("clang-tools-image", "geode-native-clang-tools") - #@ git_resource("source", data.values.repository.url, data.values.repository.branch, ignore_paths=["ci/*", "packer/*"]) - #@ template.replace(build_resources(data.values.builds, data.values.configs)) - #@ registry_image_resource("ytt-image", "gerritk/ytt") - #@ git_resource("ci-source", data.values.repository.url, data.values.repository.branch, ["ci/*"]) + - name: geode-archive + type: apache-directory-index + source: + directory: https://archive.apache.org/dist/geode/ + folder_pattern: $VERSION + file_pattern: apache-geode-$VERSION.tgz jobs: - #@ template.replace(build_jobs(data.values.builds, data.values.configs)) + - name: check-source + plan: + - in_parallel: + fail_fast: true + steps: + - get: clang-tools-image + - get: source + trigger: true + - do: + - in_parallel: + fail_fast: true + steps: + - get: geode-archive + - get: task-image + - task: extract-geode + image: task-image + config: + platform: linux + inputs: + - name: geode-archive + outputs: + - name: geode + params: + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -xueo 
pipefail + tar -zxf geode-archive/apache-geode-*.tgz -C geode --strip-components=1 + - in_parallel: + steps: + - task: clang-tidy + image: clang-tools-image + config: + platform: linux + inputs: + - name: source + - name: geode + outputs: + - name: build + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -xueo pipefail + export GEODE_HOME=$(pwd)/geode + cd build + cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON + cmake --build dependencies --parallel $(nproc) + run-clang-tidy -j $(nproc) -quiet + - task: clang-format + image: clang-tools-image + config: + platform: linux + inputs: + - name: source + - name: geode + outputs: + - name: build + run: + path: bash + args: + - -c + #@yaml/text-templated-strings + - | + set -xueo pipefail + export GEODE_HOME=$(pwd)/geode + cd build + cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON + jq -r '.[].file' compile_commands.json | sort | uniq | xargs clang-format --dry-run -Werror --verbose + - #@ update_pipeline_job() groups: @@ -53,6 +139,7 @@ groups: #@ for/end config in data.values.configs: - #@ build_job_name(build, config) #@ end + - check-source - name: meta jobs: - #@ update_pipeline_job_name() @@ -63,3 +150,4 @@ groups: #@ for/end config in data.values.configs: - #@ build_job_name(build, config) #@ end + - check-source diff --git a/ci/docker/clang-tools/Dockerfile b/ci/docker/clang-tools/Dockerfile index 227c771142..421e133949 100644 --- a/ci/docker/clang-tools/Dockerfile +++ b/ci/docker/clang-tools/Dockerfile @@ -18,6 +18,7 @@ FROM ubuntu:groovy LABEL maintainer="Apache Geode " LABEL description="Minimal image for building with clang toolset." 
+ARG CLANG_VERSION=11 RUN apt-get update \ && apt-get install -y \ bash \ @@ -28,11 +29,6 @@ RUN apt-get update \ doxygen \ openjdk-8-jdk-headless \ jq \ - && rm -rf /var/lib/apt/lists/* - -ARG CLANG_VERSION=11 -RUN apt-get update \ - && apt-get install -y \ clang-${CLANG_VERSION} \ lld-${CLANG_VERSION} \ clang-format-${CLANG_VERSION} \ @@ -46,4 +42,6 @@ RUN apt-get update \ && update-alternatives --install /usr/bin/cc cc /usr/bin/clang 10 \ && update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++ 10 \ && update-alternatives --install /usr/bin/ld ld /usr/bin/lld 10 \ + && apt-get -y autoremove \ + && apt-get -y autoclean \ && rm -rf /var/lib/apt/lists/* diff --git a/ci/release/pipeline.yml b/ci/release/pipeline.yml index 92817d384f..1f6bb6e8f2 100644 --- a/ci/release/pipeline.yml +++ b/ci/release/pipeline.yml @@ -55,6 +55,8 @@ jobs: - #@ version_source_job() #@overlay/append - #@ docker_job("task-image", "ci-source", "ci/docker/task") + #@overlay/append + - #@ docker_job("clang-tools-image", "ci-source", "ci/docker/clang-tools") #@ for build in data.values.builds: #@overlay/append @@ -117,6 +119,7 @@ groups: #@ for/end build in data.values.builds: - #@ packer_job_name(build) - #@ docker_job_name("task-image") + - #@ docker_job_name("clang-tools-image") #@overlay/match by="name" - name: all jobs: @@ -127,6 +130,8 @@ groups: #@overlay/append - #@ docker_job_name("task-image") #@overlay/append + - #@ docker_job_name("clang-tools-image") + #@overlay/append - #@ version_source_job_name() #@overlay/append - github-release From add64a08819f74631dd8d726348d7ce1d76ef805 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 08:42:02 -0800 Subject: [PATCH 144/155] clang-format fixes --- cppcache/integration/framework/Cluster.cpp | 7 ++-- sqliteimpl/SqLiteHelper.cpp | 36 ++++++++--------- sqliteimpl/SqLiteImpl.cpp | 46 +++++++++++----------- 3 files changed, 45 insertions(+), 44 deletions(-) diff --git a/cppcache/integration/framework/Cluster.cpp 
b/cppcache/integration/framework/Cluster.cpp index d53126dc35..b9a453abe4 100644 --- a/cppcache/integration/framework/Cluster.cpp +++ b/cppcache/integration/framework/Cluster.cpp @@ -434,9 +434,10 @@ void Cluster::start(std::function extraGfshCommands) { servers_.reserve(initialServers_); std::string xmlFile; for (size_t i = 0; i < initialServers_; i++) { - xmlFile = (cacheXMLFiles_.size() == 0) ? "" - : cacheXMLFiles_.size() == 1 ? cacheXMLFiles_[0] - : cacheXMLFiles_[i]; + xmlFile = (cacheXMLFiles_.size() == 0) + ? "" + : cacheXMLFiles_.size() == 1 ? cacheXMLFiles_[0] + : cacheXMLFiles_[i]; servers_.push_back({*this, locators_, name_ + "/server/" + std::to_string(i), xmlFile, diff --git a/sqliteimpl/SqLiteHelper.cpp b/sqliteimpl/SqLiteHelper.cpp index e6ec65154f..06555fbdec 100644 --- a/sqliteimpl/SqLiteHelper.cpp +++ b/sqliteimpl/SqLiteHelper.cpp @@ -15,14 +15,14 @@ * limitations under the License. */ -#include - #include "SqLiteHelper.hpp" +#include + #define QUERY_SIZE 512 -int SqLiteHelper::initDB(const char *regionName, int maxPageCount, int pageSize, - const char *regionDBfile, int busy_timeout_ms) { +int SqLiteHelper::initDB(const char* regionName, int maxPageCount, int pageSize, + const char* regionDBfile, int busy_timeout_ms) { // open the database int retCode = sqlite3_open(regionDBfile, &m_dbHandle); if (retCode == SQLITE_OK) { @@ -52,7 +52,7 @@ int SqLiteHelper::createTable() { SNPRINTF(query, QUERY_SIZE, "CREATE TABLE IF NOT EXISTS %s(key BLOB PRIMARY KEY,value BLOB);", m_tableName); - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; // prepare statement int retCode; @@ -64,14 +64,14 @@ int SqLiteHelper::createTable() { return retCode == SQLITE_DONE ? 
0 : retCode; } -int SqLiteHelper::insertKeyValue(void *keyData, int keyDataSize, - void *valueData, int valueDataSize) { +int SqLiteHelper::insertKeyValue(void* keyData, int keyDataSize, + void* valueData, int valueDataSize) { // construct query char query[QUERY_SIZE]; SNPRINTF(query, QUERY_SIZE, "REPLACE INTO %s VALUES(?,?);", m_tableName); // prepare statement - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; int retCode = sqlite3_prepare_v2(m_dbHandle, query, -1, &stmt, nullptr); if (retCode == SQLITE_OK) { // bind parameters and execte statement @@ -84,13 +84,13 @@ int SqLiteHelper::insertKeyValue(void *keyData, int keyDataSize, return retCode == SQLITE_DONE ? 0 : retCode; } -int SqLiteHelper::removeKey(void *keyData, int keyDataSize) { +int SqLiteHelper::removeKey(void* keyData, int keyDataSize) { // construct query char query[QUERY_SIZE]; SNPRINTF(query, QUERY_SIZE, "DELETE FROM %s WHERE key=?;", m_tableName); // prepare statement - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; int retCode = sqlite3_prepare_v2(m_dbHandle, query, -1, &stmt, nullptr); if (retCode == SQLITE_OK) { // bind parameters and execte statement @@ -102,8 +102,8 @@ int SqLiteHelper::removeKey(void *keyData, int keyDataSize) { return retCode == SQLITE_DONE ? 
0 : retCode; } -int SqLiteHelper::getValue(void *keyData, int keyDataSize, void *&valueData, - int &valueDataSize) { +int SqLiteHelper::getValue(void* keyData, int keyDataSize, void*& valueData, + int& valueDataSize) { // construct query char query[QUERY_SIZE]; SNPRINTF(query, QUERY_SIZE, @@ -111,7 +111,7 @@ int SqLiteHelper::getValue(void *keyData, int keyDataSize, void *&valueData, m_tableName); // prepare statement - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; int retCode = sqlite3_prepare_v2(m_dbHandle, query, -1, &stmt, nullptr); if (retCode == SQLITE_OK) { // bind parameters and execte statement @@ -119,10 +119,10 @@ int SqLiteHelper::getValue(void *keyData, int keyDataSize, void *&valueData, retCode = sqlite3_step(stmt); if (retCode == SQLITE_ROW) // we will get only one row { - void *tempBuff = const_cast(sqlite3_column_blob(stmt, 0)); + void* tempBuff = const_cast(sqlite3_column_blob(stmt, 0)); valueDataSize = sqlite3_column_int(stmt, 1); valueData = - reinterpret_cast(malloc(sizeof(uint8_t) * valueDataSize)); + reinterpret_cast(malloc(sizeof(uint8_t) * valueDataSize)); memcpy(valueData, tempBuff, valueDataSize); retCode = sqlite3_step(stmt); } @@ -138,7 +138,7 @@ int SqLiteHelper::dropTable() { SNPRINTF(query, QUERY_SIZE, "DROP TABLE %s;", m_tableName); // prepare statement - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; int retCode; retCode = sqlite3_prepare_v2(m_dbHandle, query, -1, &stmt, nullptr); @@ -156,7 +156,7 @@ int SqLiteHelper::closeDB() { return retCode; } -int SqLiteHelper::executePragma(const char *pragmaName, int pragmaValue) { +int SqLiteHelper::executePragma(const char* pragmaName, int pragmaValue) { // create query char query[QUERY_SIZE]; char strVal[50]; @@ -164,7 +164,7 @@ int SqLiteHelper::executePragma(const char *pragmaName, int pragmaValue) { SNPRINTF(query, QUERY_SIZE, "PRAGMA %s = %s;", pragmaName, strVal); // prepare statement - sqlite3_stmt *stmt; + sqlite3_stmt* stmt; int retCode; retCode = sqlite3_prepare_v2(m_dbHandle, query, -1, 
&stmt, nullptr); diff --git a/sqliteimpl/SqLiteImpl.cpp b/sqliteimpl/SqLiteImpl.cpp index 96a86dfc5d..f1acb09e98 100644 --- a/sqliteimpl/SqLiteImpl.cpp +++ b/sqliteimpl/SqLiteImpl.cpp @@ -15,10 +15,11 @@ * limitations under the License. */ -#include +#include "SqLiteImpl.hpp" + #include +#include -#include "SqLiteImpl.hpp" #include "sqliteimpl_export.h" #ifdef _WIN32 @@ -37,8 +38,8 @@ static constexpr char const* MAX_PAGE_COUNT = "MaxPageCount"; static constexpr char const* PAGE_SIZE = "PageSize"; static constexpr char const* PERSISTENCE_DIR = "PersistenceDirectory"; -void SqLiteImpl::init(const std::shared_ptr ®ion, - const std::shared_ptr &diskProperties) { +void SqLiteImpl::init(const std::shared_ptr& region, + const std::shared_ptr& diskProperties) { // Set the default values int maxPageCount = 0; @@ -107,21 +108,20 @@ void SqLiteImpl::init(const std::shared_ptr ®ion, } } -void SqLiteImpl::write(const std::shared_ptr &key, - const std::shared_ptr &value, - std::shared_ptr &) { +void SqLiteImpl::write(const std::shared_ptr& key, + const std::shared_ptr& value, + std::shared_ptr&) { // Serialize key and value. 
- auto &cache = m_regionPtr->getCache(); + auto& cache = m_regionPtr->getCache(); auto keyDataBuffer = cache.createDataOutput(); auto valueDataBuffer = cache.createDataOutput(); size_t keyBufferSize, valueBufferSize; keyDataBuffer.writeObject(key); valueDataBuffer.writeObject(value); - void *keyData = - const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); - void *valueData = - const_cast(valueDataBuffer.getBuffer(&valueBufferSize)); + void* keyData = const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); + void* valueData = + const_cast(valueDataBuffer.getBuffer(&valueBufferSize)); if (m_sqliteHelper->insertKeyValue(keyData, static_cast(keyBufferSize), valueData, @@ -132,14 +132,13 @@ void SqLiteImpl::write(const std::shared_ptr &key, bool SqLiteImpl::writeAll() { return true; } std::shared_ptr SqLiteImpl::read( - const std::shared_ptr &key, const std::shared_ptr &) { + const std::shared_ptr& key, const std::shared_ptr&) { // Serialize key. auto keyDataBuffer = m_regionPtr->getCache().createDataOutput(); size_t keyBufferSize; keyDataBuffer.writeObject(key); - void *keyData = - const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); - void *valueData; + void* keyData = const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); + void* valueData; int valueBufferSize; if (m_sqliteHelper->getValue(keyData, static_cast(keyBufferSize), @@ -149,7 +148,7 @@ std::shared_ptr SqLiteImpl::read( // Deserialize object and return value. auto valueDataBuffer = m_regionPtr->getCache().createDataInput( - reinterpret_cast(valueData), valueBufferSize); + reinterpret_cast(valueData), valueBufferSize); std::shared_ptr retValue; valueDataBuffer.readObject(retValue); @@ -176,21 +175,22 @@ void SqLiteImpl::destroyRegion() { #endif } -void SqLiteImpl::destroy(const std::shared_ptr &key, - const std::shared_ptr &) { +void SqLiteImpl::destroy(const std::shared_ptr& key, + const std::shared_ptr&) { // Serialize key and value. 
auto keyDataBuffer = m_regionPtr->getCache().createDataOutput(); size_t keyBufferSize; keyDataBuffer.writeObject(key); - void *keyData = - const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); + void* keyData = const_cast(keyDataBuffer.getBuffer(&keyBufferSize)); if (m_sqliteHelper->removeKey(keyData, static_cast(keyBufferSize)) != 0) { throw IllegalStateException("Failed to destroy the key from SQLITE."); } } -SqLiteImpl::SqLiteImpl() { m_sqliteHelper = std::unique_ptr(new SqLiteHelper()); } +SqLiteImpl::SqLiteImpl() { + m_sqliteHelper = std::unique_ptr(new SqLiteHelper()); +} void SqLiteImpl::close() { m_sqliteHelper->closeDB(); @@ -215,7 +215,7 @@ extern "C" { using apache::geode::client::PersistenceManager; using apache::geode::client::SqLiteImpl; -SQLITEIMPL_EXPORT PersistenceManager *createSqLiteInstance() { +SQLITEIMPL_EXPORT PersistenceManager* createSqLiteInstance() { return new SqLiteImpl(); } } From c5d2b2c695dbb315ad718790e96de43a53422247 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 08:47:34 -0800 Subject: [PATCH 145/155] clang-format fixes --- cppcache/integration-test/fw_dunit.cpp | 2 +- ...estThinClientSecurityPostAuthorization.cpp | 4 +- cppcache/integration/framework/Cluster.cpp | 7 +- cppcache/src/CacheTransactionManagerImpl.cpp | 4 +- cppcache/src/ExceptionTypes.cpp | 107 ++++++++---------- .../src/VersionedCacheableObjectPartList.cpp | 4 +- 6 files changed, 63 insertions(+), 65 deletions(-) diff --git a/cppcache/integration-test/fw_dunit.cpp b/cppcache/integration-test/fw_dunit.cpp index fa1350abd5..ed31da3d13 100644 --- a/cppcache/integration-test/fw_dunit.cpp +++ b/cppcache/integration-test/fw_dunit.cpp @@ -140,7 +140,7 @@ class NamingContextImpl : virtual public NamingContext { LOGCOORDINATOR(func); LOGCOORDINATOR("Dump follows:"); dump(); - throw - 1; + throw -1; } return result; } diff --git a/cppcache/integration-test/testThinClientSecurityPostAuthorization.cpp 
b/cppcache/integration-test/testThinClientSecurityPostAuthorization.cpp index d151bdda2a..a92e224431 100644 --- a/cppcache/integration-test/testThinClientSecurityPostAuthorization.cpp +++ b/cppcache/integration-test/testThinClientSecurityPostAuthorization.cpp @@ -90,7 +90,9 @@ void initClientAuth(char userType, int clientNum = 1) { config->insert("security-password", "geode1"); break; } - default: { break; } + default: { + break; + } } initClient(true, config); } diff --git a/cppcache/integration/framework/Cluster.cpp b/cppcache/integration/framework/Cluster.cpp index b9a453abe4..d53126dc35 100644 --- a/cppcache/integration/framework/Cluster.cpp +++ b/cppcache/integration/framework/Cluster.cpp @@ -434,10 +434,9 @@ void Cluster::start(std::function extraGfshCommands) { servers_.reserve(initialServers_); std::string xmlFile; for (size_t i = 0; i < initialServers_; i++) { - xmlFile = (cacheXMLFiles_.size() == 0) - ? "" - : cacheXMLFiles_.size() == 1 ? cacheXMLFiles_[0] - : cacheXMLFiles_[i]; + xmlFile = (cacheXMLFiles_.size() == 0) ? "" + : cacheXMLFiles_.size() == 1 ? 
cacheXMLFiles_[0] + : cacheXMLFiles_[i]; servers_.push_back({*this, locators_, name_ + "/server/" + std::to_string(i), xmlFile, diff --git a/cppcache/src/CacheTransactionManagerImpl.cpp b/cppcache/src/CacheTransactionManagerImpl.cpp index 0467cea3a8..9baf25dadf 100644 --- a/cppcache/src/CacheTransactionManagerImpl.cpp +++ b/cppcache/src/CacheTransactionManagerImpl.cpp @@ -160,7 +160,9 @@ GfErrType CacheTransactionManagerImpl::rollback(TXState*, bool) { case TcrMessage::EXCEPTION: { break; } - default: { break; } + default: { + break; + } } } diff --git a/cppcache/src/ExceptionTypes.cpp b/cppcache/src/ExceptionTypes.cpp index 213e2f5779..252a0ee745 100644 --- a/cppcache/src/ExceptionTypes.cpp +++ b/cppcache/src/ExceptionTypes.cpp @@ -73,8 +73,8 @@ using apache::geode::client::UnknownException; throw NotConnectedException{message}; } - [[noreturn]] void messageException(std::string message, std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void messageException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? exMsg : ": message from server could not be handled"); throw MessageException{message}; @@ -93,8 +93,8 @@ using apache::geode::client::UnknownException; throw CacheServerException{message}; } - [[noreturn]] void notOwnerException(std::string message, std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void notOwnerException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? 
exMsg : ": not own the lock"); throw NotOwnerException{message}; } @@ -110,9 +110,9 @@ using apache::geode::client::UnknownException; throw IllegalStateException{message}; } - [[noreturn]] void illegalArgumentException(std::string message, - std::string& exMsg, - GfErrType err, std::string) { +[[noreturn]] void illegalArgumentException(std::string message, + std::string& exMsg, GfErrType err, + std::string) { if (err == GF_CACHE_ILLEGAL_ARGUMENT_EXCEPTION) { message.append(!exMsg.empty() ? exMsg : ": illegal argument"); } @@ -140,9 +140,8 @@ using apache::geode::client::UnknownException; throw CacheWriterException{message}; } - [[noreturn]] void cacheLoaderException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void cacheLoaderException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? exMsg : ": exception in CacheLoader"); throw CacheLoaderException{message}; } @@ -154,9 +153,9 @@ using apache::geode::client::UnknownException; throw CacheListenerException{message}; } - [[noreturn]] void regionDestroyedException(std::string message, - std::string& exMsg, - GfErrType err, std::string) { +[[noreturn]] void regionDestroyedException(std::string message, + std::string& exMsg, GfErrType err, + std::string) { if (err == GF_CACHE_REGION_INVALID) { message.append(!exMsg.empty() ? exMsg : ": region not valid"); } @@ -172,8 +171,8 @@ using apache::geode::client::UnknownException; throw CacheProxyException{message}; } - [[noreturn]] void geodeIOException(std::string message, std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void geodeIOException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? 
exMsg : ": Input/Output error in operation"); throw GeodeIOException{message}; } @@ -184,8 +183,8 @@ using apache::geode::client::UnknownException; throw NoSystemException{message}; } - [[noreturn]] void timeoutException(std::string message, std::string& exMsg, - GfErrType err, std::string) { +[[noreturn]] void timeoutException(std::string message, std::string& exMsg, + GfErrType err, std::string) { if (err == GF_CLIENT_WAIT_TIMEOUT) { message.append(!exMsg.empty() ? exMsg @@ -202,9 +201,9 @@ using apache::geode::client::UnknownException; throw OutOfMemoryException{message}; } - [[noreturn]] void bufferSizeExceededException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void bufferSizeExceededException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": Buffer Size Exceeded"); throw BufferSizeExceededException{message}; } @@ -215,9 +214,8 @@ using apache::geode::client::UnknownException; throw LeaseExpiredException{message}; } - [[noreturn]] void regionExistsException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void regionExistsException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? exMsg : ": Named Region Exists"); throw RegionExistsException{message}; } @@ -229,9 +227,8 @@ using apache::geode::client::UnknownException; throw EntryNotFoundException{message}; } - [[noreturn]] void entryExistsException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void entryExistsException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? 
exMsg : ": Entry already exists in the region"); throw EntryExistsException{message}; @@ -244,9 +241,8 @@ using apache::geode::client::UnknownException; throw EntryDestroyedException{message}; } - [[noreturn]] void cacheClosedException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void cacheClosedException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? exMsg : ": Cache has been closed"); throw CacheClosedException{message}; } @@ -260,9 +256,9 @@ using apache::geode::client::UnknownException; throw StatisticsDisabledException{message}; } - [[noreturn]] void concurrentModificationException(std::string message, - std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void concurrentModificationException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": Concurrent modification in the cache"); throw ConcurrentModificationException{message}; @@ -275,9 +271,9 @@ using apache::geode::client::UnknownException; throw NotAuthorizedException{message}; } - [[noreturn]] void authenticationFailedException(std::string message, - std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void authenticationFailedException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": authentication failed"); throw AuthenticationFailedException{message}; } @@ -289,9 +285,9 @@ using apache::geode::client::UnknownException; throw AuthenticationRequiredException{message}; } - [[noreturn]] void duplicateDurableClientException(std::string message, - std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void duplicateDurableClientException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? 
exMsg : ": Duplicate Durable Client Id"); throw DuplicateDurableClientException{message}; } @@ -302,10 +298,8 @@ using apache::geode::client::UnknownException; throw QueryException{message}; } - [[noreturn]] void noAvailableLocatorsException(std::string, - std::string& exMsg, - GfErrType, - std::string func) { +[[noreturn]] void noAvailableLocatorsException(std::string, std::string& exMsg, + GfErrType, std::string func) { try { throw NoAvailableLocatorsException{ func + (!exMsg.empty() ? exMsg : ": No locators available")}; @@ -322,9 +316,9 @@ using apache::geode::client::UnknownException; throw AllConnectionsInUseException{message}; } - [[noreturn]] void functionExecutionException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void functionExecutionException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": Function execution failed"); throw FunctionExecutionException{message}; } @@ -335,8 +329,8 @@ using apache::geode::client::UnknownException; throw DiskFailureException{message}; } - [[noreturn]] void rollbackException(std::string message, std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void rollbackException(std::string message, std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? exMsg : ": Transaction rolled back"); throw RollbackException{message}; } @@ -348,10 +342,9 @@ using apache::geode::client::UnknownException; throw CommitConflictException{message}; } - [[noreturn]] void transactionDataRebalancedException(std::string message, - std::string& exMsg, - GfErrType, - std::string) { +[[noreturn]] void transactionDataRebalancedException(std::string message, + std::string& exMsg, + GfErrType, std::string) { message.append(!exMsg.empty() ? 
exMsg : ": Transaction data rebalanced exception"); throw TransactionDataRebalancedException{message}; @@ -367,9 +360,9 @@ using apache::geode::client::UnknownException; throw TransactionDataNodeHasDepartedException{message}; } - [[noreturn]] void putAllPartialResultException(std::string message, - std::string& exMsg, - GfErrType, std::string) { +[[noreturn]] void putAllPartialResultException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": PutAll Partial exception"); throw PutAllPartialResultException{message}; } @@ -380,9 +373,9 @@ using apache::geode::client::UnknownException; throw LowMemoryException{message}; } - [[noreturn]] void queryLowMemoryException(std::string message, - std::string& exMsg, GfErrType, - std::string) { +[[noreturn]] void queryLowMemoryException(std::string message, + std::string& exMsg, GfErrType, + std::string) { message.append(!exMsg.empty() ? exMsg : ": Query execution low memory exception"); throw QueryExecutionLowMemoryException{message}; diff --git a/cppcache/src/VersionedCacheableObjectPartList.cpp b/cppcache/src/VersionedCacheableObjectPartList.cpp index 7190424cfc..47215d1628 100644 --- a/cppcache/src/VersionedCacheableObjectPartList.cpp +++ b/cppcache/src/VersionedCacheableObjectPartList.cpp @@ -233,7 +233,9 @@ void VersionedCacheableObjectPartList::fromData(DataInput& input) { versionTag->setInternalMemID(ids.at(idNumber)); break; } - default: { break; } + default: { + break; + } } m_versionTags[index] = versionTag; } From cc49e3aec6d7d64ed9837a8f28602b6f00579bc0 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 08:51:09 -0800 Subject: [PATCH 146/155] Less verbose clang-format --- ci/base/pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index dbc7cca368..f9ef510f93 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -128,7 +128,7 @@ jobs: export 
GEODE_HOME=$(pwd)/geode cd build cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON - jq -r '.[].file' compile_commands.json | sort | uniq | xargs clang-format --dry-run -Werror --verbose + jq -r '.[].file' compile_commands.json | sort | uniq | xargs clang-format --dry-run -Werror - #@ update_pipeline_job() From 9e523df510b9d633f894240d8892d6932f18b2c5 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:04:32 -0800 Subject: [PATCH 147/155] Skip build directory/generated files. --- ci/base/pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index f9ef510f93..0ac45dbb62 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -128,7 +128,7 @@ jobs: export GEODE_HOME=$(pwd)/geode cd build cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON - jq -r '.[].file' compile_commands.json | sort | uniq | xargs clang-format --dry-run -Werror + jq -r '.[].file' compile_commands.json | sort | uniq | grep -v $(pwd) | xargs clang-format --dry-run -Werror - #@ update_pipeline_job() From f9ddbfd16ee9b070400d94433f97f7f975678a89 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:10:44 -0800 Subject: [PATCH 148/155] Hide noise in clang-format. 
--- ci/base/pipeline.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index 0ac45dbb62..c9a4a25709 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -124,10 +124,10 @@ jobs: - -c #@yaml/text-templated-strings - | - set -xueo pipefail + set -ueo pipefail export GEODE_HOME=$(pwd)/geode cd build - cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON + cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON > /dev/null jq -r '.[].file' compile_commands.json | sort | uniq | grep -v $(pwd) | xargs clang-format --dry-run -Werror - #@ update_pipeline_job() From 4b6ebd61c801ae0af2be046131582c37cf50f7c0 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:14:47 -0800 Subject: [PATCH 149/155] Hide noise in clang-tidy. --- ci/base/pipeline.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ci/base/pipeline.yml b/ci/base/pipeline.yml index c9a4a25709..c07289d353 100644 --- a/ci/base/pipeline.yml +++ b/ci/base/pipeline.yml @@ -103,11 +103,11 @@ jobs: - -c #@yaml/text-templated-strings - | - set -xueo pipefail + set -ueo pipefail export GEODE_HOME=$(pwd)/geode cd build - cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON - cmake --build dependencies --parallel $(nproc) + cmake ../source -DCMAKE_EXPORT_COMPILE_COMMANDS=ON > /dev/null + cmake --build dependencies --parallel $(nproc) > /dev/null run-clang-tidy -j $(nproc) -quiet - task: clang-format image: clang-tools-image From a0ec5d2205860979dfb5ac5d1c5daff50c3575dc Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:35:05 -0800 Subject: [PATCH 150/155] clang-format --- cppcache/include/geode/CacheableString.hpp | 10 +++++----- cppcache/include/geode/DataInput.hpp | 2 +- tests/cpp/fwklib/FwkBB.hpp | 22 +++++++++++----------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/cppcache/include/geode/CacheableString.hpp b/cppcache/include/geode/CacheableString.hpp index 
e5973af28c..91a651cf00 100644 --- a/cppcache/include/geode/CacheableString.hpp +++ b/cppcache/include/geode/CacheableString.hpp @@ -54,11 +54,11 @@ class APACHE_GEODE_EXPORT CacheableString : m_str(std::move(value)), m_hashcode(0) { bool ascii = isAscii(m_str); - m_type = - m_str.length() > std::numeric_limits::max() - ? ascii ? DSCode::CacheableASCIIStringHuge - : DSCode::CacheableStringHuge - : ascii ? DSCode::CacheableASCIIString : DSCode::CacheableString; + m_type = m_str.length() > std::numeric_limits::max() + ? ascii ? DSCode::CacheableASCIIStringHuge + : DSCode::CacheableStringHuge + : ascii ? DSCode::CacheableASCIIString + : DSCode::CacheableString; } ~CacheableString() noexcept override = default; diff --git a/cppcache/include/geode/DataInput.hpp b/cppcache/include/geode/DataInput.hpp index 262bf58a80..23a771af8d 100644 --- a/cppcache/include/geode/DataInput.hpp +++ b/cppcache/include/geode/DataInput.hpp @@ -381,7 +381,7 @@ class APACHE_GEODE_EXPORT DataInput { } else { int8_t** tmpArray; int32_t* tmpLengtharr; - _GEODE_NEW(tmpArray, int8_t * [arrLen]); + _GEODE_NEW(tmpArray, int8_t* [arrLen]); _GEODE_NEW(tmpLengtharr, int32_t[arrLen]); for (int i = 0; i < arrLen; i++) { readBytes(&tmpArray[i], &tmpLengtharr[i]); diff --git a/tests/cpp/fwklib/FwkBB.hpp b/tests/cpp/fwklib/FwkBB.hpp index b13eba3857..12c1ccf915 100644 --- a/tests/cpp/fwklib/FwkBB.hpp +++ b/tests/cpp/fwklib/FwkBB.hpp @@ -50,18 +50,18 @@ namespace testframework { #define BB_RESULT_TAG "" // #define BB_END_TAG "" -#define BB_CLEAR_COMMAND "C" //"clear" -#define BB_DUMP_COMMAND "d" //"dump" -#define BB_GET_COMMAND "g" //"get" -#define BB_SET_COMMAND "s" //"set" -#define BB_ADD_COMMAND "A" //"add" -#define BB_SUBTRACT_COMMAND "S" //"subtract" -#define BB_INCREMENT_COMMAND "I" //"increment" -#define BB_DECREMENT_COMMAND "D" //"decrement" -#define BB_ZERO_COMMAND "z" //"zero" +#define BB_CLEAR_COMMAND "C" //"clear" +#define BB_DUMP_COMMAND "d" //"dump" +#define BB_GET_COMMAND "g" //"get" +#define 
BB_SET_COMMAND "s" //"set" +#define BB_ADD_COMMAND "A" //"add" +#define BB_SUBTRACT_COMMAND "S" //"subtract" +#define BB_INCREMENT_COMMAND "I" //"increment" +#define BB_DECREMENT_COMMAND "D" //"decrement" +#define BB_ZERO_COMMAND "z" //"zero" #define BB_SET_IF_GREATER_COMMAND "G" //"setIfGreater" -#define BB_SET_IF_LESS_COMMAND "L" //"setIfLess" -#define BB_SET_ACK_COMMAND "a" //"ack" +#define BB_SET_IF_LESS_COMMAND "L" //"setIfLess" +#define BB_SET_ACK_COMMAND "a" //"ack" // ---------------------------------------------------------------------------- From 1aea7c66386fe1ba1b39f3275f723ba4aa502db4 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:38:23 -0800 Subject: [PATCH 151/155] clang-tidy fixes --- cppcache/src/CacheXmlParser.cpp | 2 +- cppcache/src/Log.cpp | 48 +++++++++------------------------ 2 files changed, 13 insertions(+), 37 deletions(-) diff --git a/cppcache/src/CacheXmlParser.cpp b/cppcache/src/CacheXmlParser.cpp index 47c2382330..5cf74ea506 100644 --- a/cppcache/src/CacheXmlParser.cpp +++ b/cppcache/src/CacheXmlParser.cpp @@ -242,7 +242,7 @@ extern "C" void warningDebug(void *, const char *msg, ...) 
{ char logmsg[2048]; va_list args; va_start(args, msg); - vsprintf(logmsg, msg, args); + std::vsnprintf(logmsg, sizeof(logmsg), msg, args); va_end(args); LOGWARN("SAX.warning during XML declarative client initialization: %s", logmsg); diff --git a/cppcache/src/Log.cpp b/cppcache/src/Log.cpp index 7b8c2ad40d..01f0d77d66 100644 --- a/cppcache/src/Log.cpp +++ b/cppcache/src/Log.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include #include #include @@ -33,7 +34,6 @@ #include #include -#include #include #include "../internal/hacks/AceThreadId.h" @@ -302,26 +302,19 @@ void Log::init(LogLevel level, const char* logFileName, int32_t logFileLimit, g_rollIndex++, extName.c_str()); bool rollFileNameGot = false; while (!rollFileNameGot) { - FILE* checkFile = fopen(rollFile, "r"); - if (checkFile != nullptr) { + if (auto checkFile = fopen(rollFile, "r")) { fclose(checkFile); - checkFile = nullptr; std::snprintf(rollFile, 1024, "%s%c%s-%d.%s", logsdirname.c_str(), ACE_DIRECTORY_SEPARATOR_CHAR, fnameBeforeExt.c_str(), g_rollIndex++, extName.c_str()); } else { rollFileNameGot = true; } - /* adongre - * CID 28999: Use after free (USE_AFTER_FREE) - */ - if (checkFile != nullptr) fclose(existingFile); } // retry some number of times before giving up when file is busy etc. 
- int renameResult = -1; int maxTries = 10; while (maxTries-- > 0) { - renameResult = ACE_OS::rename(g_logFileWithExt->c_str(), rollFile); + auto renameResult = ACE_OS::rename(g_logFileWithExt->c_str(), rollFile); if (renameResult >= 0) { break; } @@ -338,7 +331,6 @@ void Log::init(LogLevel level, const char* logFileName, int32_t logFileLimit, } if (existingFile != nullptr) { fclose(existingFile); - existingFile = nullptr; } } else if (g_logFile) { delete g_logFile; @@ -351,10 +343,7 @@ void Log::init(LogLevel level, const char* logFileName, int32_t logFileLimit, void Log::close() { std::lock_guard guard(g_logMutex); - std::string oldfile; - if (g_logFile) { - oldfile = *g_logFile; delete g_logFile; g_logFile = nullptr; } @@ -417,8 +406,7 @@ void Log::writeBanner() { } int numchars = 0; - const char* pch = nullptr; - pch = strchr(bannertext.c_str(), '\n'); + auto pch = strchr(bannertext.c_str(), '\n'); while (pch != nullptr) { pch = strchr(pch + 1, '\n'); numchars += 2; @@ -879,16 +867,11 @@ LogFn::~LogFn() { // var arg logging routines. -#ifdef _WIN32 -#define vsnprintf _vsnprintf -#endif - void LogVarargs::debug(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Debug, msg); va_end(argp); } @@ -897,8 +880,7 @@ void LogVarargs::error(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Error, msg); va_end(argp); } @@ -907,8 +889,7 @@ void LogVarargs::warn(const char* fmt, ...) 
{ char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Warning, msg); va_end(argp); } @@ -917,8 +898,7 @@ void LogVarargs::info(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Info, msg); va_end(argp); } @@ -927,8 +907,7 @@ void LogVarargs::config(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Config, msg); va_end(argp); } @@ -937,8 +916,7 @@ void LogVarargs::fine(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Fine, msg); va_end(argp); } @@ -947,8 +925,7 @@ void LogVarargs::finer(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Finer, msg); va_end(argp); } @@ -957,8 +934,7 @@ void LogVarargs::finest(const char* fmt, ...) 
{ char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); - vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); - /* win doesn't guarantee termination */ msg[_GF_MSG_LIMIT - 1] = '\0'; + std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Finest, msg); va_end(argp); } From 0f168b45cc43754d7be2274360178f7676758a3b Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 09:45:22 -0800 Subject: [PATCH 152/155] clang-tidy fixes --- cppcache/src/CacheXmlParser.cpp | 1 + cppcache/src/Log.cpp | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/cppcache/src/CacheXmlParser.cpp b/cppcache/src/CacheXmlParser.cpp index 5cf74ea506..0d72b443e3 100644 --- a/cppcache/src/CacheXmlParser.cpp +++ b/cppcache/src/CacheXmlParser.cpp @@ -242,6 +242,7 @@ extern "C" void warningDebug(void *, const char *msg, ...) { char logmsg[2048]; va_list args; va_start(args, msg); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(logmsg, sizeof(logmsg), msg, args); va_end(args); LOGWARN("SAX.warning during XML declarative client initialization: %s", diff --git a/cppcache/src/Log.cpp b/cppcache/src/Log.cpp index 01f0d77d66..93d4d6b6ca 100644 --- a/cppcache/src/Log.cpp +++ b/cppcache/src/Log.cpp @@ -871,6 +871,7 @@ void LogVarargs::debug(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Debug, msg); va_end(argp); @@ -880,6 +881,7 @@ void LogVarargs::error(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Error, msg); va_end(argp); @@ -889,6 +891,7 @@ void LogVarargs::warn(const char* fmt, ...) 
{ char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Warning, msg); va_end(argp); @@ -898,6 +901,7 @@ void LogVarargs::info(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Info, msg); va_end(argp); @@ -907,6 +911,7 @@ void LogVarargs::config(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Config, msg); va_end(argp); @@ -916,6 +921,7 @@ void LogVarargs::fine(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Fine, msg); va_end(argp); @@ -925,6 +931,7 @@ void LogVarargs::finer(const char* fmt, ...) { char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Finer, msg); va_end(argp); @@ -934,6 +941,7 @@ void LogVarargs::finest(const char* fmt, ...) 
{ char msg[_GF_MSG_LIMIT] = {0}; va_list argp; va_start(argp, fmt); + // NOLINTNEXTLINE(clang-analyzer-valist.Uninitialized): clang-tidy bug std::vsnprintf(msg, _GF_MSG_LIMIT, fmt, argp); Log::put(LogLevel::Finest, msg); va_end(argp); From 1670c7b69340f3a0d9291125a7fa80f4d6428c94 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 10:14:51 -0800 Subject: [PATCH 153/155] clang-tidy fixes --- cppcache/integration-test/testSerialization.cpp | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/cppcache/integration-test/testSerialization.cpp b/cppcache/integration-test/testSerialization.cpp index 3c95dddebe..f898b26314 100644 --- a/cppcache/integration-test/testSerialization.cpp +++ b/cppcache/integration-test/testSerialization.cpp @@ -18,8 +18,6 @@ #include #include -#include - #include "fw_dunit.hpp" #include "ThinClientHelper.hpp" @@ -39,8 +37,6 @@ using apache::geode::client::DataOutput; using apache::geode::client::DataSerializable; int32_t g_classIdToReturn = 0x04; -int32_t g_classIdToReturn2 = 0x1234; -int32_t g_classIdToReturn4 = 0x123456; template std::shared_ptr duplicate(const std::shared_ptr &orig) { @@ -103,6 +99,7 @@ class OtherType : public DataSerializable { ot->m_struct.b = (i % 2 == 0) ? true : false; ot->m_struct.c = static_cast(65) + i; ot->m_struct.d = ((2.0) * static_cast(i)); + ot->m_struct.e = (static_cast(i) << 32) + i; printf("Created OtherType: %d, %s, %c, %e\n", ot->m_struct.a, ot->m_struct.b ? "true" : "false", ot->m_struct.c, ot->m_struct.d); From ac9465d38c9d808e28709ec98b3cffddc4fbae75 Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Wed, 2 Dec 2020 15:58:45 -0800 Subject: [PATCH 154/155] Test PR 1.1 --- CMakeLists.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 60fd2edb05..cda32a6eda 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -403,3 +403,5 @@ if (USE_RAT) COMMENT "Checking source with Apache Rat." 
) endif() + +message(FATAL_ERROR "this is a bad PR") From 77778df89b225410f393459a7ed899befe84308e Mon Sep 17 00:00:00 2001 From: Jacob Barrett Date: Sat, 2 Jan 2021 13:31:39 -0800 Subject: [PATCH 155/155] Update CMakeLists.txt --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index cda32a6eda..16a0288c44 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -404,4 +404,4 @@ if (USE_RAT) ) endif() -message(FATAL_ERROR "this is a bad PR") +#message(FATAL_ERROR "this is a bad PR")