diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bef0203832..3a712dfd62 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,12 @@
 name: CI
 
-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - main
+      - 1.*
+      - 2.*
+  pull_request:
 
 env:
   GRADLE_OPTS: -Dhttp.keepAlive=false
@@ -18,7 +24,7 @@ jobs:
         java-version: 17
 
     - name: Checkout security
-      uses: actions/checkout@v2
+      uses: actions/checkout@v4
 
     - name: Generate list of tasks
       id: set-matrix
@@ -44,7 +50,7 @@ jobs:
         java-version: ${{ matrix.jdk }}
 
     - name: Checkout security
-      uses: actions/checkout@v2
+      uses: actions/checkout@v4
 
     - name: Build and Test
       uses: gradle/gradle-build-action@v2
@@ -52,7 +58,6 @@ jobs:
         cache-disabled: true
         arguments: |
           ${{ matrix.gradle_task }} -Dbuild.snapshot=false
-          -x test
 
     - name: Coverage
       uses: Wandalen/wretry.action@v1.3.0
@@ -62,7 +67,7 @@ jobs:
         action: codecov/codecov-action@v3
         with: |
           token: ${{ secrets.CODECOV_TOKEN }}
-          fail_ci_if_error: true
+          fail_ci_if_error: false
           files: ./build/reports/jacoco/test/jacocoTestReport.xml
 
     - uses: actions/upload-artifact@v3
@@ -76,7 +81,8 @@ jobs:
       if: always()
       run: echo "Check the artifact ${{ matrix.platform }}-JDK${{ matrix.jdk }}-reports for detailed test results"
 
-  backward-compatibility:
+  integration-tests:
+    name: integration-tests
     strategy:
       fail-fast: false
       matrix:
@@ -85,80 +91,105 @@ jobs:
     runs-on: ${{ matrix.platform }}
 
     steps:
-    - uses: actions/setup-java@v3
+    - name: Set up JDK for build and test
+      uses: actions/setup-java@v3
       with:
         distribution: temurin # Temurin is a distribution of adoptium
         java-version: ${{ matrix.jdk }}
 
-    - name: Checkout Security Repo
-      uses: actions/checkout@v2
+    - name: Checkout security
+      uses: actions/checkout@v4
 
-    - id: build-previous
-      uses: ./.github/actions/run-bwc-suite
+    - name: Build and Test
+      uses: gradle/gradle-build-action@v2
       with:
-        plugin-previous-branch: "2.10"
-        plugin-next-branch: "current_branch"
-        report-artifact-name: bwc-${{ matrix.platform }}-jdk${{ matrix.jdk }}
-        username: admin
-        password: admin
+        cache-disabled: true
+        arguments: |
+          integrationTest -Dbuild.snapshot=false
+
+  # backward-compatibility:
+  #   strategy:
+  #     fail-fast: false
+  #     matrix:
+  #       jdk: [11, 17]
+  #       platform: [ubuntu-latest, windows-latest]
+  #   runs-on: ${{ matrix.platform }}
+
+  #   steps:
+  #   - uses: actions/setup-java@v3
+  #     with:
+  #       distribution: temurin # Temurin is a distribution of adoptium
+  #       java-version: ${{ matrix.jdk }}
+
+  #   - name: Checkout Security Repo
+  #     uses: actions/checkout@v4
+
+  #   - id: build-previous
+  #     uses: ./.github/actions/run-bwc-suite
+  #     with:
+  #       plugin-previous-branch: "2.10"
+  #       plugin-next-branch: "current_branch"
+  #       report-artifact-name: bwc-${{ matrix.platform }}-jdk${{ matrix.jdk }}
+  #       username: admin
+  #       password: admin
 
   code-ql:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - uses: actions/setup-java@v3
       with:
         distribution: temurin # Temurin is a distribution of adoptium
         java-version: 11
-    - uses: github/codeql-action/init@v1
+    - uses: github/codeql-action/init@v2
       with:
         languages: java
     - run: ./gradlew clean build -Dbuild.snapshot=false -x test
-    - uses: github/codeql-action/analyze@v1
-
-  build-artifact-names:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v2
-
-    - uses: actions/setup-java@v3
-      with:
-        distribution: temurin # Temurin is a distribution of adoptium
-        java-version: 11
-
-    - run: |
-        security_plugin_version=$(./gradlew properties -q | grep -E '^version:' | awk '{print $2}')
-        security_plugin_version_no_snapshot=$(echo $security_plugin_version | sed 's/-SNAPSHOT//g')
-        security_plugin_version_only_number=$(echo $security_plugin_version_no_snapshot | cut -d- -f1)
-        test_qualifier=alpha2
-
-        echo "SECURITY_PLUGIN_VERSION=$security_plugin_version" >> $GITHUB_ENV
-        echo "SECURITY_PLUGIN_VERSION_NO_SNAPSHOT=$security_plugin_version_no_snapshot" >> $GITHUB_ENV
-        echo "SECURITY_PLUGIN_VERSION_ONLY_NUMBER=$security_plugin_version_only_number" >> $GITHUB_ENV
-        echo "TEST_QUALIFIER=$test_qualifier" >> $GITHUB_ENV
-
-    - run: |
-        echo ${{ env.SECURITY_PLUGIN_VERSION }}
-        echo ${{ env.SECURITY_PLUGIN_VERSION_NO_SNAPSHOT }}
-        echo ${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}
-        echo ${{ env.TEST_QUALIFIER }}
-
-    - run: ./gradlew clean assemble && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION }}.zip
-
-    - run: ./gradlew clean assemble -Dbuild.snapshot=false && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_NO_SNAPSHOT }}.zip
-
-    - run: ./gradlew clean assemble -Dbuild.snapshot=false -Dbuild.version_qualifier=${{ env.TEST_QUALIFIER }} && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}-${{ env.TEST_QUALIFIER }}.zip
-
-    - run: ./gradlew clean assemble -Dbuild.version_qualifier=${{ env.TEST_QUALIFIER }} && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}-${{ env.TEST_QUALIFIER }}-SNAPSHOT.zip
-
-    - run:  |
-        ## EXISTING_OS_VERSION outputs the major version, example as 2
-        EXISTING_OS_VERSION=$(./gradlew properties | grep opensearch.version | cut -d':' -f2- | awk '{$1=$1};1' | cut -d '-' -f1 | cut -d '.' -f1)
-        ## INCREMENT_OS_VERSION in an increment of 1, example if EXISTING_OS_VERSION is 2, INCREMENT_OS_VERSION is 3
-        INCREMENT_OS_VERSION=$((++EXISTING_OS_VERSION))
-        ./gradlew clean updateVersion -DnewVersion=$INCREMENT_OS_VERSION.0.0-SNAPSHOT
-        test `./gradlew properties | grep opensearch.version | cut -d':' -f2- | awk '{$1=$1};1'` = $INCREMENT_OS_VERSION.0.0-SNAPSHOT
-
-    - name: List files in the build directory if there was an error
-      run: ls -al ./build/distributions/
-      if: failure()
+    - uses: github/codeql-action/analyze@v2
+
+  # build-artifact-names:
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #   - uses: actions/checkout@v4
+
+  #   - uses: actions/setup-java@v3
+  #     with:
+  #       distribution: temurin # Temurin is a distribution of adoptium
+  #       java-version: 11
+
+  #   - run: |
+  #       security_plugin_version=$(./gradlew properties -q | grep -E '^version:' | awk '{print $2}')
+  #       security_plugin_version_no_snapshot=$(echo $security_plugin_version | sed 's/-SNAPSHOT//g')
+  #       security_plugin_version_only_number=$(echo $security_plugin_version_no_snapshot | cut -d- -f1)
+  #       test_qualifier=alpha2
+
+  #       echo "SECURITY_PLUGIN_VERSION=$security_plugin_version" >> $GITHUB_ENV
+  #       echo "SECURITY_PLUGIN_VERSION_NO_SNAPSHOT=$security_plugin_version_no_snapshot" >> $GITHUB_ENV
+  #       echo "SECURITY_PLUGIN_VERSION_ONLY_NUMBER=$security_plugin_version_only_number" >> $GITHUB_ENV
+  #       echo "TEST_QUALIFIER=$test_qualifier" >> $GITHUB_ENV
+
+  #   - run: |
+  #       echo ${{ env.SECURITY_PLUGIN_VERSION }}
+  #       echo ${{ env.SECURITY_PLUGIN_VERSION_NO_SNAPSHOT }}
+  #       echo ${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}
+  #       echo ${{ env.TEST_QUALIFIER }}
+
+  #   - run: ./gradlew clean assemble && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION }}.zip
+
+  #   - run: ./gradlew clean assemble -Dbuild.snapshot=false && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_NO_SNAPSHOT }}.zip
+
+  #   - run: ./gradlew clean assemble -Dbuild.snapshot=false -Dbuild.version_qualifier=${{ env.TEST_QUALIFIER }} && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}-${{ env.TEST_QUALIFIER }}.zip
+
+  #   - run: ./gradlew clean assemble -Dbuild.version_qualifier=${{ env.TEST_QUALIFIER }} && test -s ./build/distributions/opensearch-security-${{ env.SECURITY_PLUGIN_VERSION_ONLY_NUMBER }}-${{ env.TEST_QUALIFIER }}-SNAPSHOT.zip
+
+  #   - run:  |
+  #       ## EXISTING_OS_VERSION outputs the major version, example as 2
+  #       EXISTING_OS_VERSION=$(./gradlew properties | grep opensearch.version | cut -d':' -f2- | awk '{$1=$1};1' | cut -d '-' -f1 | cut -d '.' -f1)
+  #       ## INCREMENT_OS_VERSION in an increment of 1, example if EXISTING_OS_VERSION is 2, INCREMENT_OS_VERSION is 3
+  #       INCREMENT_OS_VERSION=$((++EXISTING_OS_VERSION))
+  #       ./gradlew clean updateVersion -DnewVersion=$INCREMENT_OS_VERSION.0.0-SNAPSHOT
+  #       test `./gradlew properties | grep opensearch.version | cut -d':' -f2- | awk '{$1=$1};1'` = $INCREMENT_OS_VERSION.0.0-SNAPSHOT
+
+  #   - name: List files in the build directory if there was an error
+  #     run: ls -al ./build/distributions/
+  #     if: failure()
diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml
index 1490e1d7f6..1d904020ca 100644
--- a/.github/workflows/maven-publish.yml
+++ b/.github/workflows/maven-publish.yml
@@ -21,8 +21,8 @@ jobs:
         with:
           distribution: temurin # Temurin is a distribution of adoptium
           java-version: 11
-      - uses: actions/checkout@v3
-      - uses: aws-actions/configure-aws-credentials@v1
+      - uses: actions/checkout@v4
+      - uses: aws-actions/configure-aws-credentials@v4
         with:
           role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }}
           aws-region: us-east-1
diff --git a/build.gradle b/build.gradle
index 8204e0be90..726596b16f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -26,7 +26,7 @@ buildscript {
 
         common_utils_version = System.getProperty("common_utils.version", '2.9.0.0-SNAPSHOT')
         kafka_version  = '3.5.1'
-        apache_cxf_version = '4.0.2'
+        apache_cxf_version = '4.0.3'
         open_saml_version = '3.4.5'
         one_login_java_saml = '2.9.0'
         jjwt_version = '0.11.5'
@@ -434,6 +434,14 @@ configurations {
             force "com.github.luben:zstd-jni:${versions.zstd}"
             force "org.xerial.snappy:snappy-java:1.1.10.3"
             force "com.google.guava:guava:${guava_version}"
+
+            // TODO: Seems like this should be removable
+            force "org.apache.httpcomponents:httpclient-cache:4.5.13"
+            force "org.apache.httpcomponents:httpclient:4.5.13"
+            force "org.apache.httpcomponents:fluent-hc:4.5.13"
+            force "org.apache.httpcomponents:httpcore:4.4.16"
+            force "org.apache.httpcomponents:httpcore-nio:4.4.16"
+            force "org.apache.httpcomponents:httpasyncclient:4.1.5"
         }
     }
 
@@ -501,7 +509,7 @@ dependencies {
         exclude group: "com.google.code.gson", module: "gson"
         exclude group: "org.json", module: "json"
     }
-    implementation 'com.github.wnameless.json:json-flattener:0.16.5'
+    implementation 'com.github.wnameless.json:json-flattener:0.16.6'
     // JSON patch
     implementation 'com.flipkart.zjsonpatch:zjsonpatch:0.4.14'
     implementation 'org.apache.commons:commons-collections4:4.4'
@@ -566,10 +574,10 @@ dependencies {
     runtimeOnly 'org.codehaus.woodstox:stax2-api:4.2.1'
     runtimeOnly "org.glassfish.jaxb:txw2:${jaxb_version}"
     runtimeOnly 'com.fasterxml.woodstox:woodstox-core:6.5.1'
-    runtimeOnly 'org.apache.ws.xmlschema:xmlschema-core:2.2.5'
+    runtimeOnly 'org.apache.ws.xmlschema:xmlschema-core:2.3.1'
     runtimeOnly 'org.apache.santuario:xmlsec:2.3.3'
     runtimeOnly "com.github.luben:zstd-jni:${versions.zstd}"
-    runtimeOnly 'org.checkerframework:checker-qual:3.36.0'
+    runtimeOnly 'org.checkerframework:checker-qual:3.38.0'
     runtimeOnly "org.bouncycastle:bcpkix-jdk15to18:${versions.bouncycastle}"
     runtimeOnly 'org.scala-lang.modules:scala-java8-compat_3:1.0.2'
 
@@ -595,7 +603,7 @@ dependencies {
     testImplementation "org.apache.kafka:kafka_2.13:${kafka_version}:test"
     testImplementation "org.apache.kafka:kafka-clients:${kafka_version}:test"
     testImplementation 'org.springframework.kafka:spring-kafka-test:2.9.6'
-    testImplementation 'org.springframework:spring-beans:5.3.20'
+    testImplementation 'org.springframework:spring-beans:5.3.30'
     testImplementation 'org.junit.jupiter:junit-jupiter:5.10.0'
     testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.0'
     // Only osx-x86_64, osx-aarch_64, linux-x86_64, linux-aarch_64, windows-x86_64 are available
@@ -639,6 +647,11 @@ dependencies {
         exclude(group: 'org.hamcrest', module: 'hamcrest')
     }
     integrationTestImplementation 'com.unboundid:unboundid-ldapsdk:4.0.14'
+    integrationTestImplementation "org.apache.httpcomponents:httpclient-cache:4.5.13"
+    integrationTestImplementation "org.apache.httpcomponents:httpclient:4.5.13"
+    integrationTestImplementation "org.apache.httpcomponents:fluent-hc:4.5.13"
+    integrationTestImplementation "org.apache.httpcomponents:httpcore:4.4.13"
+    integrationTestImplementation "org.apache.httpcomponents:httpasyncclient:4.1.5"
 
     //Checkstyle
     checkstyle 'com.puppycrawl.tools:checkstyle:10.12.1'
diff --git a/release-notes/opensearch-security.release-notes-2.10.0.0.md b/release-notes/opensearch-security.release-notes-2.10.0.0.md
new file mode 100644
index 0000000000..19548d5930
--- /dev/null
+++ b/release-notes/opensearch-security.release-notes-2.10.0.0.md
@@ -0,0 +1,76 @@
+## 2023-08-31 Version 2.10.0.0
+
+Compatible with OpenSearch 2.10.0
+
+### Enhancements
+* Add .plugins-ml-config to the demo configuration system indices ([#2993](https://github.com/opensearch-project/security/pull/2993))
+* Add workflow cluster permissions to alerting roles ([#2994](https://github.com/opensearch-project/security/pull/2994))
+* Include password regex for Dashboardsinfo to display to users ([#2999](https://github.com/opensearch-project/security/pull/2999))
+* Add geospatial ip2geo to the demo configuration system indices and roles ([#3051](https://github.com/opensearch-project/security/pull/3051))
+* Make invalid password message clearer ([#3057](https://github.com/opensearch-project/security/pull/3057))
+* Service Accounts password is randomly generated ([#3077](https://github.com/opensearch-project/security/pull/3077))
+* Exclude sensitive info from the jackson serialization stacktraces ([#3195](https://github.com/opensearch-project/security/pull/3195))
+* Prevent raw request body as output in serialization error messages ([#3205](https://github.com/opensearch-project/security/pull/3205))
+* Command cat/indices will filter results per the Do Not Fail On Forbidden setting ([#3236](https://github.com/opensearch-project/security/pull/3236))
+* Generate new demo certs with IPv6 loopback added to SAN in node certificate ([#3268](https://github.com/opensearch-project/security/pull/3268))
+* System index permissions ([#2887](https://github.com/opensearch-project/security/pull/2887))
+
+
+### Bug Fixes
+* Prevent raw request body as output in serialization error messages ([#3205](https://github.com/opensearch-project/security/pull/3205))
+* Prevent flaky behavior when determining if an request will be executed on the current node. ([#3066](https://github.com/opensearch-project/security/pull/3066))
+* Resolve a class of ConcurrentModificationException from during bulk requests ([#3094](https://github.com/opensearch-project/security/pull/3094))
+* Fix Document GET with DLS terms query ([#3136](https://github.com/opensearch-project/security/pull/3136))
+* Send log messages to log4j systems instead of system out / error ([#3231](https://github.com/opensearch-project/security/pull/3231))
+* Fix roles verification for roles mapping and internal users ([#3278](https://github.com/opensearch-project/security/pull/3278))
+* Prevent raw request body as output in serialization error messages ([#3205](https://github.com/opensearch-project/security/pull/3205))
+* Fix permissions issues while reading keys in PKCS#1 format ([#3289](https://github.com/opensearch-project/security/pull/3289))
+
+### Maintenance
+* [Build Break] Update imports for files refactored in core PR #8157 ([#3003](https://github.com/opensearch-project/security/pull/3003))
+* [Build Break] Fix build after Lucene upgrade and breaking XContentFactory changes ([#3069](https://github.com/opensearch-project/security/pull/3069))
+* [Build Break] Update CircuitBreakerService and LifecycleComponent after core refactor in #9006 ([#3082](https://github.com/opensearch-project/security/pull/3082))
+* [Build Break] React to changes in ActionListener and ActionResponse from #9082 ([#3153](https://github.com/opensearch-project/security/pull/3153))
+* [Build Break] Disable gradlew build cache to ensure most up-to-date dependencies ([#3186](https://github.com/opensearch-project/security/pull/3186))
+* Bump com.carrotsearch.randomizedtesting:randomizedtesting-runner from 2.7.1 to 2.8.1 ([#3109](https://github.com/opensearch-project/security/pull/3109))
+* Bump com.diffplug.spotless from 6.19.0 to 6.21.0 ([#3108](https://github.com/opensearch-project/security/pull/3108))
+* Bump com.fasterxml.woodstox:woodstox-core from 6.4.0 to 6.5.1 ([#3148](https://github.com/opensearch-project/security/pull/3148))
+* Bump com.github.spotbugs from 5.0.14 to 5.1.3 ([#3251](https://github.com/opensearch-project/security/pull/3251))
+* Bump com.github.wnameless.json:json-base from 2.4.0 to 2.4.2 ([#3062](https://github.com/opensearch-project/security/pull/3062))
+* Bump com.github.wnameless.json:json-flattener from 0.16.4 to 0.16.5 ([#3296](https://github.com/opensearch-project/security/pull/3296))
+* Bump com.google.errorprone:error_prone_annotations from 2.3.4 to 2.20.0 ([#3023](https://github.com/opensearch-project/security/pull/3023))
+* Bump com.google.guava:guava from 32.1.1-jre to 32.1.2-jre ([#3149](https://github.com/opensearch-project/security/pull/3149))
+* Bump commons-io:commons-io from 2.11.0 to 2.13.0 ([#3074](https://github.com/opensearch-project/security/pull/3074))
+* Bump com.netflix.nebula.ospackage from 11.1.0 to 11.3.0 ([#3023](https://github.com/opensearch-project/security/pull/3023))
+* Bump com.nulab-inc:zxcvbn from 1.7.0 to 1.8.0 ([#3023](https://github.com/opensearch-project/security/pull/3023))
+* Bump com.unboundid:unboundid-ldapsdk from 4.0.9 to 4.0.14 ([#3143](https://github.com/opensearch-project/security/pull/3143))
+* Bump io.dropwizard.metrics:metrics-core from 3.1.2 to 4.2.19 ([#3073](https://github.com/opensearch-project/security/pull/3073))
+* Bump kafka_version from 3.5.0 to 3.5.1 ([#3041](https://github.com/opensearch-project/security/pull/3041))
+* Bump net.minidev:json-smart from 2.4.11 to 2.5.0 ([#3120](https://github.com/opensearch-project/security/pull/3120))
+* Bump org.apache.camel:camel-xmlsecurity from 3.14.2 to 3.21.0 ([#3023](https://github.com/opensearch-project/security/pull/3023))
+* Bump org.apache.santuario:xmlsec from 2.2.3 to 2.3.3 ([#3210](https://github.com/opensearch-project/security/pull/3210))
+* Bump org.checkerframework:checker-qual from 3.5.0 to 3.36.0 ([#3023](https://github.com/opensearch-project/security/pull/3023))
+* Bump org.cryptacular:cryptacular from 1.2.4 to 1.2.5 ([#3071](https://github.com/opensearch-project/security/pull/3071))
+* Bump org.gradle.test-retry from 1.5.2 to 1.5.4 ([#3072](https://github.com/opensearch-project/security/pull/3072))
+* Bump org.junit.jupiter:junit-jupiter from 5.8.2 to 5.10.0 ([#3146](https://github.com/opensearch-project/security/pull/3146))
+* Bump org.ow2.asm:asm from 9.1 to 9.5 ([#3121](https://github.com/opensearch-project/security/pull/3121))
+* Bump org.scala-lang:scala-library from 2.13.9 to 2.13.11 ([#3119](https://github.com/opensearch-project/security/pull/3119))
+* Bump org.slf4j:slf4j-api from 1.7.30 to 1.7.36 ([#3249](https://github.com/opensearch-project/security/pull/3249))
+* Bump org.xerial.snappy:snappy-java from 1.1.10.1 to 1.1.10.3 ([#3106](https://github.com/opensearch-project/security/pull/3106))
+* Bump actions/create-release from 1.0.0 to 1.1.4 ([#3141](https://github.com/opensearch-project/security/pull/3141))
+* Bump actions/setup-java from 1 to 3 ([#3142](https://github.com/opensearch-project/security/pull/3142))
+* Bump actions/upload-release-asset from 1.0.1 to 1.0.2 ([#3144](https://github.com/opensearch-project/security/pull/3144))
+* Bump fernandrone/linelint from 0.0.4 to 0.0.6 ([#3211](https://github.com/opensearch-project/security/pull/3211))
+* Bump tibdex/github-app-token from 1.5.0 to 1.8.0 ([#3147](https://github.com/opensearch-project/security/pull/3147))
+* Remove log spam for files that are cleaned up ([#3118](https://github.com/opensearch-project/security/pull/3118))
+* Updates integTestRemote task to dynamically fetch common-utils version from build.gradle ([#3122](https://github.com/opensearch-project/security/pull/3122))
+* Switch CodeQL to assemble artifacts using the same build as the rest of CI ([#3132](https://github.com/opensearch-project/security/pull/3132))
+* Only run the backport job on merged pull requests ([#3134](https://github.com/opensearch-project/security/pull/3134))
+* Add code coverage exclusions on false positives ([#3196](https://github.com/opensearch-project/security/pull/3196))
+* Enable jarhell check ([#3227](https://github.com/opensearch-project/security/pull/3227))
+* Retry code coverage upload on failure ([#3242](https://github.com/opensearch-project/security/pull/3242))
+* [Refactor] Adopt request builder patterns for SecurityRestApiActions for consistency and clarity ([#3123](https://github.com/opensearch-project/security/pull/3123))
+* [Refactor] Remove json-path from deps and use JsonPointer instead ([#3262](https://github.com/opensearch-project/security/pull/3262))
+* Use version of org.apache.commons:commons-lang3 defined in core ([#3306](https://github.com/opensearch-project/security/pull/3306))
+* Fix checkstyle #3283
+* Demo Configuration changes ([#3330](https://github.com/opensearch-project/security/pull/3330))
\ No newline at end of file
diff --git a/src/integrationTest/java/org/opensearch/common/logging/NodeAndClusterIdConverter.java b/src/integrationTest/java/org/opensearch/common/logging/NodeAndClusterIdConverter.java
new file mode 100644
index 0000000000..4aba6c976b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/common/logging/NodeAndClusterIdConverter.java
@@ -0,0 +1,29 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.common.logging;
+
+/**
+* Class uses to override OpenSearch NodeAndClusterIdConverter Log4j2 plugin in order to disable plugin and limit number of
+* warn messages like "...ApplierService#updateTask][T#1] WARN ClusterApplierService:628 - failed to notify ClusterStateListener..."
+* during tests execution.
+*
+* The class is rather a temporary solution and the real one should be developed in scope of:
+* https://github.com/opensearch-project/OpenSearch/pull/4322
+*/
+import org.apache.logging.log4j.core.LogEvent;
+
+class NodeAndClusterIdConverter {
+
+    public NodeAndClusterIdConverter() {}
+
+    public static void setNodeIdAndClusterId(String nodeId, String clusterUUID) {}
+
+    public void format(LogEvent event, StringBuilder toAppendTo) {}
+}
diff --git a/src/integrationTest/java/org/opensearch/node/PluginAwareNode.java b/src/integrationTest/java/org/opensearch/node/PluginAwareNode.java
new file mode 100644
index 0000000000..53e44496ca
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/node/PluginAwareNode.java
@@ -0,0 +1,52 @@
+/*
+* Copyright 2015-2018 _floragunn_ GmbH
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.node;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.plugins.Plugin;
+
+public class PluginAwareNode extends Node {
+
+    private final boolean clusterManagerEligible;
+
+    @SafeVarargs
+    public PluginAwareNode(boolean clusterManagerEligible, final Settings preparedSettings, final Class<? extends Plugin>... plugins) {
+        super(
+            InternalSettingsPreparer.prepareEnvironment(preparedSettings, Collections.emptyMap(), null, () -> System.getenv("HOSTNAME")),
+            Arrays.asList(plugins),
+            true
+        );
+        this.clusterManagerEligible = clusterManagerEligible;
+    }
+
+    public boolean isClusterManagerEligible() {
+        return clusterManagerEligible;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/ConfigurationFiles.java b/src/integrationTest/java/org/opensearch/security/ConfigurationFiles.java
new file mode 100644
index 0000000000..287bc139b1
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/ConfigurationFiles.java
@@ -0,0 +1,60 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Objects;
+
+class ConfigurationFiles {
+
+    public static void createRoleMappingFile(File destination) {
+        String resource = "roles_mapping.yml";
+        copyResourceToFile(resource, destination);
+    }
+
+    public static Path createConfigurationDirectory() {
+        try {
+            Path tempDirectory = Files.createTempDirectory("test-security-config");
+            String[] configurationFiles = {
+                "config.yml",
+                "action_groups.yml",
+                "config.yml",
+                "internal_users.yml",
+                "roles.yml",
+                "roles_mapping.yml",
+                "security_tenants.yml",
+                "tenants.yml" };
+            for (String fileName : configurationFiles) {
+                Path configFileDestination = tempDirectory.resolve(fileName);
+                copyResourceToFile(fileName, configFileDestination.toFile());
+            }
+            return tempDirectory.toAbsolutePath();
+        } catch (IOException ex) {
+            throw new RuntimeException("Cannot create directory with security plugin configuration.", ex);
+        }
+    }
+
+    private static void copyResourceToFile(String resource, File destination) {
+        try (InputStream input = ConfigurationFiles.class.getClassLoader().getResourceAsStream(resource)) {
+            Objects.requireNonNull(input, "Cannot find source resource " + resource);
+            try (OutputStream output = new FileOutputStream(destination)) {
+                input.transferTo(output);
+            }
+        } catch (IOException e) {
+            throw new RuntimeException("Cannot create file with security plugin configuration", e);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/CrossClusterSearchTests.java b/src/integrationTest/java/org/opensearch/security/CrossClusterSearchTests.java
new file mode 100644
index 0000000000..86d27efa87
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/CrossClusterSearchTests.java
@@ -0,0 +1,455 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.commons.lang3.tuple.Pair;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.SearchRequestFactory;
+import org.opensearch.test.framework.cluster.TestRestClient;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.security.Song.ARTIST_FIRST;
+import static org.opensearch.security.Song.FIELD_ARTIST;
+import static org.opensearch.security.Song.FIELD_GENRE;
+import static org.opensearch.security.Song.FIELD_LYRICS;
+import static org.opensearch.security.Song.FIELD_STARS;
+import static org.opensearch.security.Song.FIELD_TITLE;
+import static org.opensearch.security.Song.GENRE_JAZZ;
+import static org.opensearch.security.Song.GENRE_ROCK;
+import static org.opensearch.security.Song.QUERY_TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_MAGNUM_OPUS;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryStringQueryRequest;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitContainsFieldWithValue;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitDoesNotContainField;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentsInAnyOrder;
+
+/**
+* This is a parameterized test so that one test class is used to test security plugin behaviour when <code>ccsMinimizeRoundtrips</code>
+* option is enabled or disabled. Method {@link #parameters()} is a source of parameters values.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class CrossClusterSearchTests {
+
+    private static final String SONG_INDEX_NAME = "song_lyrics";
+
+    private static final String PROHIBITED_SONG_INDEX_NAME = "prohibited_song_lyrics";
+
+    public static final String REMOTE_CLUSTER_NAME = "ccsRemote";
+    public static final String REMOTE_SONG_INDEX = REMOTE_CLUSTER_NAME + ":" + SONG_INDEX_NAME;
+
+    public static final String SONG_ID_1R = "remote-00001";
+    public static final String SONG_ID_2L = "local-00002";
+    public static final String SONG_ID_3R = "remote-00003";
+    public static final String SONG_ID_4L = "local-00004";
+    public static final String SONG_ID_5R = "remote-00005";
+    public static final String SONG_ID_6R = "remote-00006";
+
+    private static final Role LIMITED_ROLE = new Role("limited_role").indexPermissions(
+        "indices:data/read/search",
+        "indices:admin/shards/search_shards"
+    ).on(SONG_INDEX_NAME, "user-${user.name}-${attr.internal.type}");
+
+    private static final Role DLS_ROLE_ROCK = new Role("dls_role_rock").indexPermissions(
+        "indices:data/read/search",
+        "indices:data/read/get",
+        "indices:admin/shards/search_shards"
+    ).dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_GENRE, GENRE_ROCK)).on(SONG_INDEX_NAME);
+
+    private static final Role DLS_ROLE_JAZZ = new Role("dls_role_jazz").indexPermissions(
+        "indices:data/read/search",
+        "indices:data/read/get",
+        "indices:admin/shards/search_shards"
+    ).dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_GENRE, GENRE_JAZZ)).on(SONG_INDEX_NAME);
+
+    private static final Role FLS_EXCLUDE_LYRICS_ROLE = new Role("fls_exclude_lyrics_role").indexPermissions(
+        "indices:data/read/search",
+        "indices:data/read/get",
+        "indices:admin/shards/search_shards"
+    ).fls("~" + FIELD_LYRICS).on(SONG_INDEX_NAME);
+
+    private static final Role FLS_INCLUDE_TITLE_ROLE = new Role("fls_include_title_role").indexPermissions(
+        "indices:data/read/search",
+        "indices:data/read/get",
+        "indices:admin/shards/search_shards"
+    ).fls(FIELD_TITLE).on(SONG_INDEX_NAME);
+
+    public static final String TYPE_ATTRIBUTE = "type";
+
+    private static final User ADMIN_USER = new User("admin").roles(ALL_ACCESS).attr(TYPE_ATTRIBUTE, "administrative");
+    private static final User LIMITED_USER = new User("limited_user").attr(TYPE_ATTRIBUTE, "personal");
+
+    private static final User FLS_INCLUDE_TITLE_USER = new User("fls_include_title_user");
+
+    private static final User FLS_EXCLUDE_LYRICS_USER = new User("fls_exclude_lyrics_user");
+
+    private static final User DLS_USER_ROCK = new User("dls-user-rock");
+
+    private static final User DLS_USER_JAZZ = new User("dls-user-jazz");
+
+    public static final String LIMITED_USER_INDEX_NAME = "user-" + LIMITED_USER.getName() + "-" + LIMITED_USER.getAttribute(TYPE_ATTRIBUTE);
+    public static final String ADMIN_USER_INDEX_NAME = "user-" + ADMIN_USER.getName() + "-" + ADMIN_USER.getAttribute(TYPE_ATTRIBUTE);
+
+    private static final TestCertificates TEST_CERTIFICATES = new TestCertificates();
+
+    private final boolean ccsMinimizeRoundtrips;
+
+    public static final String PLUGINS_SECURITY_RESTAPI_ROLES_ENABLED = "plugins.security.restapi.roles_enabled";
+    @ClassRule
+    public static final LocalCluster remoteCluster = new LocalCluster.Builder().certificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .clusterName(REMOTE_CLUSTER_NAME)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .roles(LIMITED_ROLE, DLS_ROLE_ROCK, DLS_ROLE_JAZZ, FLS_EXCLUDE_LYRICS_ROLE, FLS_INCLUDE_TITLE_ROLE)
+        .users(ADMIN_USER)
+        .build();
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().certificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLE_REMOTE_CLIENT)
+        .anonymousAuth(false)
+        .clusterName("ccsLocal")
+        .nodeSettings(Map.of(PLUGINS_SECURITY_RESTAPI_ROLES_ENABLED, List.of("user_" + ADMIN_USER.getName() + "__" + ALL_ACCESS.getName())))
+        .remote(REMOTE_CLUSTER_NAME, remoteCluster)
+        .roles(LIMITED_ROLE, DLS_ROLE_ROCK, DLS_ROLE_JAZZ, FLS_EXCLUDE_LYRICS_ROLE, FLS_INCLUDE_TITLE_ROLE)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER, LIMITED_USER, DLS_USER_ROCK, DLS_USER_JAZZ, FLS_INCLUDE_TITLE_USER, FLS_EXCLUDE_LYRICS_USER)
+        .build();
+
+    @ParametersFactory(shuffle = false)
+    public static Iterable<Object[]> parameters() {
+        return List.of(new Object[] { true }, new Object[] { false });
+    }
+
+    public CrossClusterSearchTests(Boolean ccsMinimizeRoundtrips) {
+        this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips;
+    }
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = remoteCluster.getInternalNodeClient()) {
+            client.prepareIndex(SONG_INDEX_NAME).setId(SONG_ID_1R).setRefreshPolicy(IMMEDIATE).setSource(SONGS[0].asMap()).get();
+            client.prepareIndex(SONG_INDEX_NAME).setId(SONG_ID_6R).setRefreshPolicy(IMMEDIATE).setSource(SONGS[5].asMap()).get();
+            client.prepareIndex(PROHIBITED_SONG_INDEX_NAME).setId(SONG_ID_3R).setRefreshPolicy(IMMEDIATE).setSource(SONGS[1].asMap()).get();
+            client.prepareIndex(LIMITED_USER_INDEX_NAME).setId(SONG_ID_5R).setRefreshPolicy(IMMEDIATE).setSource(SONGS[4].asMap()).get();
+        }
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(SONG_INDEX_NAME).setId(SONG_ID_2L).setRefreshPolicy(IMMEDIATE).setSource(SONGS[2].asMap()).get();
+            client.prepareIndex(PROHIBITED_SONG_INDEX_NAME).setId(SONG_ID_4L).setRefreshPolicy(IMMEDIATE).setSource(SONGS[3].asMap()).get();
+        }
+        try (TestRestClient client = cluster.getRestClient(ADMIN_USER)) {
+            client.assignRoleToUser(LIMITED_USER.getName(), LIMITED_ROLE.getName()).assertStatusCode(200);
+            client.assignRoleToUser(DLS_USER_ROCK.getName(), DLS_ROLE_ROCK.getName()).assertStatusCode(200);
+            client.assignRoleToUser(DLS_USER_JAZZ.getName(), DLS_ROLE_JAZZ.getName()).assertStatusCode(200);
+            client.assignRoleToUser(FLS_INCLUDE_TITLE_USER.getName(), FLS_INCLUDE_TITLE_ROLE.getName()).assertStatusCode(200);
+            client.assignRoleToUser(FLS_EXCLUDE_LYRICS_USER.getName(), FLS_EXCLUDE_LYRICS_ROLE.getName()).assertStatusCode(200);
+        }
+    }
+
+    @Test
+    public void shouldFindDocumentOnRemoteCluster_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_SONG_INDEX);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(2));
+            assertThat(response, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, SONG_ID_1R));
+            assertThat(response, searchHitsContainDocumentWithId(1, SONG_INDEX_NAME, SONG_ID_6R));
+        }
+    }
+
+    private SearchRequest searchAll(String indexName) {
+        SearchRequest searchRequest = SearchRequestFactory.searchAll(indexName);
+        searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+        return searchRequest;
+    }
+
+    @Test
+    public void shouldFindDocumentOnRemoteCluster_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":" + PROHIBITED_SONG_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentOnRemoteClustersWhenStarIsUsedAsClusterName_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll("*" + ":" + SONG_INDEX_NAME);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            // only remote documents are found
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(2));
+            assertThat(response, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, SONG_ID_1R));
+            assertThat(response, searchHitsContainDocumentWithId(1, SONG_INDEX_NAME, SONG_ID_6R));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentOnRemoteClustersWhenStarIsUsedAsClusterName_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll("*" + ":" + PROHIBITED_SONG_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentOnBothClustersWhenIndexOnBothClusterArePointedOut_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = SearchRequestFactory.searchAll(REMOTE_SONG_INDEX, SONG_INDEX_NAME);
+            searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(3));
+            assertThat(
+                response,
+                searchHitsContainDocumentsInAnyOrder(
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_1R),
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_2L),
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_6R)
+                )
+            );
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentOnBothClustersWhenIndexOnBothClusterArePointedOut_negativeLackOfLocalAccess() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            var searchRequest = SearchRequestFactory.searchAll(REMOTE_SONG_INDEX, PROHIBITED_SONG_INDEX_NAME);
+            searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentOnBothClustersWhenIndexOnBothClusterArePointedOut_negativeLackOfRemoteAccess() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            String remoteIndex = REMOTE_CLUSTER_NAME + ":" + PROHIBITED_SONG_INDEX_NAME;
+            SearchRequest searchRequest = SearchRequestFactory.searchAll(remoteIndex, SONG_INDEX_NAME);
+            searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchViaAllAliasOnRemoteCluster_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":_all");
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(4));
+            assertThat(
+                response,
+                searchHitsContainDocumentsInAnyOrder(
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_1R),
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_6R),
+                    Pair.of(PROHIBITED_SONG_INDEX_NAME, SONG_ID_3R),
+                    Pair.of(LIMITED_USER_INDEX_NAME, SONG_ID_5R)
+                )
+            );
+        }
+    }
+
+    @Test
+    public void shouldSearchViaAllAliasOnRemoteCluster_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":_all");
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchAllIndexOnRemoteClusterWhenStarIsUsedAsIndexName_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":*");
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(4));
+            assertThat(
+                response,
+                searchHitsContainDocumentsInAnyOrder(
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_1R),
+                    Pair.of(SONG_INDEX_NAME, SONG_ID_6R),
+                    Pair.of(PROHIBITED_SONG_INDEX_NAME, SONG_ID_3R),
+                    Pair.of(LIMITED_USER_INDEX_NAME, SONG_ID_5R)
+                )
+            );
+        }
+    }
+
+    @Test
+    public void shouldSearchAllIndexOnRemoteClusterWhenStarIsUsedAsIndexName_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":*");
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserNameExpressionInRoleIndexPattern_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":" + LIMITED_USER_INDEX_NAME);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserNameExpressionInRoleIndexPattern_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":" + ADMIN_USER_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchInIndexWithPrefix_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":song*");
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(2));
+            assertThat(
+                response,
+                searchHitsContainDocumentsInAnyOrder(Pair.of(SONG_INDEX_NAME, SONG_ID_1R), Pair.of(SONG_INDEX_NAME, SONG_ID_6R))
+            );
+        }
+    }
+
+    @Test
+    public void shouldSearchInIndexWithPrefix_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchAll(REMOTE_CLUSTER_NAME + ":prohibited*");
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldEvaluateDocumentLevelSecurityRulesOnRemoteClusterOnSearchRequest_caseRock() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DLS_USER_ROCK)) {
+            SearchRequest searchRequest = searchAll(REMOTE_SONG_INDEX);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            // searching for all documents, so is it important that result contain only one document with id SONG_ID_1
+            // and document with SONG_ID_6 is excluded from result set by DLS
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+            assertThat(response, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, SONG_ID_1R));
+        }
+    }
+
+    @Test
+    public void shouldEvaluateDocumentLevelSecurityRulesOnRemoteClusterOnSearchRequest_caseJazz() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DLS_USER_JAZZ)) {
+            SearchRequest searchRequest = searchAll(REMOTE_SONG_INDEX);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            // searching for all documents, so is it important that result contain only one document with id SONG_ID_6
+            // and document with SONG_ID_1 is excluded from result set by DLS
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+            assertThat(response, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, SONG_ID_6R));
+        }
+    }
+
+    @Test
+    public void shouldHaveAccessOnlyToSpecificField() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(FLS_INCLUDE_TITLE_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(REMOTE_SONG_INDEX, QUERY_TITLE_MAGNUM_OPUS);
+            searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+            // document should contain only title field
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(response, searchHitDoesNotContainField(0, FIELD_ARTIST));
+            assertThat(response, searchHitDoesNotContainField(0, FIELD_LYRICS));
+            assertThat(response, searchHitDoesNotContainField(0, FIELD_STARS));
+            assertThat(response, searchHitDoesNotContainField(0, FIELD_GENRE));
+        }
+    }
+
+    @Test
+    public void shouldLackAccessToSpecificField() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(FLS_EXCLUDE_LYRICS_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(REMOTE_SONG_INDEX, QUERY_TITLE_MAGNUM_OPUS);
+            searchRequest.setCcsMinimizeRoundtrips(ccsMinimizeRoundtrips);
+
+            SearchResponse response = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+            // document should not contain lyrics field
+            assertThat(response, searchHitDoesNotContainField(0, FIELD_LYRICS));
+
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_ARTIST, ARTIST_FIRST));
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_STARS, 1));
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_GENRE, GENRE_ROCK));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/DefaultConfigurationTests.java b/src/integrationTest/java/org/opensearch/security/DefaultConfigurationTests.java
new file mode 100644
index 0000000000..a9f6cf9b1e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/DefaultConfigurationTests.java
@@ -0,0 +1,78 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.commons.io.FileUtils;
+import org.awaitility.Awaitility;
+import org.junit.AfterClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.aMapWithSize;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasKey;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class DefaultConfigurationTests {
+
+    private final static Path configurationFolder = ConfigurationFiles.createConfigurationDirectory();
+    public static final String ADMIN_USER_NAME = "admin";
+    public static final String DEFAULT_PASSWORD = "secret";
+    public static final String NEW_USER = "new-user";
+    public static final String LIMITED_USER = "limited-user";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .nodeSettings(
+            Map.of(
+                "plugins.security.allow_default_init_securityindex",
+                true,
+                "plugins.security.restapi.roles_enabled",
+                List.of("user_admin__all_access")
+            )
+        )
+        .defaultConfigurationInitDirectory(configurationFolder.toString())
+        .loadConfigurationIntoIndex(false)
+        .build();
+
+    @AfterClass
+    public static void cleanConfigurationDirectory() throws IOException {
+        FileUtils.deleteDirectory(configurationFolder.toFile());
+    }
+
+    @Test
+    public void shouldLoadDefaultConfiguration() {
+        try (TestRestClient client = cluster.getRestClient(NEW_USER, DEFAULT_PASSWORD)) {
+            Awaitility.await().alias("Load default configuration").until(() -> client.getAuthInfo().getStatusCode(), equalTo(200));
+        }
+        try (TestRestClient client = cluster.getRestClient(ADMIN_USER_NAME, DEFAULT_PASSWORD)) {
+            client.assertCorrectCredentials(ADMIN_USER_NAME);
+            HttpResponse response = client.get("/_plugins/_security/api/internalusers");
+            response.assertStatusCode(200);
+            Map<String, Object> users = response.getBodyAs(Map.class);
+            assertThat(users, allOf(aMapWithSize(3), hasKey(ADMIN_USER_NAME), hasKey(NEW_USER), hasKey(LIMITED_USER)));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/DlsIntegrationTests.java b/src/integrationTest/java/org/opensearch/security/DlsIntegrationTests.java
index 2a33a894d1..d1957e50a6 100644
--- a/src/integrationTest/java/org/opensearch/security/DlsIntegrationTests.java
+++ b/src/integrationTest/java/org/opensearch/security/DlsIntegrationTests.java
@@ -1,12 +1,12 @@
 /*
- * Copyright OpenSearch Contributors
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- */
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
 package org.opensearch.security;
 
 import java.io.IOException;
@@ -85,15 +85,15 @@ public class DlsIntegrationTests {
     static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
 
     /**
-     * User who is allowed to read all indices.
-     */
+    * User who is allowed to read all indices.
+    */
     static final TestSecurityConfig.User READ_ALL_USER = new TestSecurityConfig.User("read_all_user").roles(
         new TestSecurityConfig.Role("read_all_user").clusterPermissions("cluster_composite_ops_ro").indexPermissions("read").on("*")
     );
 
     /**
-     * User who is allowed to see all fields on indices {@link #FIRST_INDEX_NAME} and {@link #SECOND_INDEX_NAME}.
-     */
+    * User who is allowed to see all fields on indices {@link #FIRST_INDEX_NAME} and {@link #SECOND_INDEX_NAME}.
+    */
     static final TestSecurityConfig.User READ_FIRST_AND_SECOND_USER = new TestSecurityConfig.User("read_first_and_second_user").roles(
         new TestSecurityConfig.Role("first_index_reader").clusterPermissions("cluster_composite_ops_ro")
             .indexPermissions("read")
@@ -104,8 +104,8 @@ public class DlsIntegrationTests {
     );
 
     /**
-     * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_STRING}.
-     */
+    * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_STRING}.
+    */
     static final TestSecurityConfig.User READ_WHERE_FIELD_ARTIST_MATCHES_ARTIST_STRING = new TestSecurityConfig.User(
         "read_where_field_artist_matches_artist_string"
     ).roles(
@@ -131,8 +131,8 @@ public class DlsIntegrationTests {
         );
 
     /**
-     * User who is allowed to see documents on indices where value of {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_TWINS} or {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_FIRST}:
-     */
+    * User who is allowed to see documents on indices where value of {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_TWINS} or {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_FIRST}:
+    */
     static final TestSecurityConfig.User READ_WHERE_FIELD_ARTIST_MATCHES_ARTIST_TWINS_OR_MATCHES_ARTIST_FIRST = new TestSecurityConfig.User(
         "read_where_field_artist_matches_artist_twins_or_artist_first"
     ).roles(
@@ -147,8 +147,8 @@ public class DlsIntegrationTests {
     );
 
     /**
-     * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_STARS} is less than three.
-     */
+    * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_STARS} is less than three.
+    */
     static final TestSecurityConfig.User READ_WHERE_STARS_LESS_THAN_THREE = new TestSecurityConfig.User("read_where_stars_less_than_three")
         .roles(
             new TestSecurityConfig.Role("read_where_stars_less_than_three").clusterPermissions("cluster_composite_ops_ro")
@@ -176,9 +176,9 @@ public class DlsIntegrationTests {
         .build();
 
     /**
-     * Function that returns id assigned to song with title equal to given title or throws {@link RuntimeException}
-     * when no song matches.
-     */
+    * Function that returns id assigned to song with title equal to given title or throws {@link RuntimeException}
+    * when no song matches.
+    */
     static final BiFunction<Map<String, Song>, String, String> FIND_ID_OF_SONG_WITH_TITLE = (map, title) -> map.entrySet()
         .stream()
         .filter(entry -> title.equals(entry.getValue().getTitle()))
@@ -187,9 +187,9 @@ public class DlsIntegrationTests {
         .orElseThrow(() -> new RuntimeException("Cannot find id of song with title: " + title));
 
     /**
-     * Function that returns id assigned to song with artist equal to given artist or throws {@link RuntimeException}
-     * when no song matches.
-     */
+    * Function that returns id assigned to song with artist equal to given artist or throws {@link RuntimeException}
+    * when no song matches.
+    */
     static final BiFunction<Map<String, Song>, String, String> FIND_ID_OF_SONG_WITH_ARTIST = (map, artist) -> map.entrySet()
         .stream()
         .filter(entry -> artist.equals(entry.getValue().getArtist()))
diff --git a/src/integrationTest/java/org/opensearch/security/DoNotFailOnForbiddenTests.java b/src/integrationTest/java/org/opensearch/security/DoNotFailOnForbiddenTests.java
new file mode 100644
index 0000000000..afbb9f38ae
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/DoNotFailOnForbiddenTests.java
@@ -0,0 +1,437 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.hamcrest.Matchers;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+import org.opensearch.action.get.MultiGetItemResponse;
+import org.opensearch.action.get.MultiGetRequest;
+import org.opensearch.action.get.MultiGetResponse;
+import org.opensearch.action.index.IndexRequest;
+import org.opensearch.action.search.MultiSearchRequest;
+import org.opensearch.action.search.MultiSearchResponse;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.action.search.SearchScrollRequest;
+import org.opensearch.client.Client;
+import org.opensearch.client.Request;
+import org.opensearch.client.Response;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.aMapWithSize;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.ADD;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.security.Song.FIELD_STARS;
+import static org.opensearch.security.Song.FIELD_TITLE;
+import static org.opensearch.security.Song.QUERY_TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.QUERY_TITLE_NEXT_SONG;
+import static org.opensearch.security.Song.QUERY_TITLE_POISON;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.TITLE_NEXT_SONG;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.averageAggregationRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.getSearchScrollRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryStringQueryRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.searchRequestWithScroll;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.statsAggregationRequest;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.containDocument;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.containOnlyDocumentId;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.documentContainField;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containAggregationWithNameAndType;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containNotEmptyScrollingId;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfHitsInPageIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitContainsFieldWithValue;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class DoNotFailOnForbiddenTests {
+
+    /**
+    * Songs accessible for {@link #LIMITED_USER}
+    */
+    private static final String MARVELOUS_SONGS = "marvelous_songs";
+
+    /**
+    * Songs inaccessible for {@link #LIMITED_USER}
+    */
+    private static final String HORRIBLE_SONGS = "horrible_songs";
+
+    private static final String BOTH_INDEX_PATTERN = "*songs";
+
+    private static final String ID_1 = "1";
+    private static final String ID_2 = "2";
+    private static final String ID_3 = "3";
+    private static final String ID_4 = "4";
+
+    private static final User ADMIN_USER = new User("admin").roles(ALL_ACCESS);
+    private static final User LIMITED_USER = new User("limited_user").roles(
+        new TestSecurityConfig.Role("limited-role").clusterPermissions(
+            "indices:data/read/mget",
+            "indices:data/read/msearch",
+            "indices:data/read/scroll",
+            "cluster:monitor/state",
+            "cluster:monitor/health"
+        )
+            .indexPermissions(
+                "indices:data/read/search",
+                "indices:data/read/mget*",
+                "indices:data/read/field_caps",
+                "indices:data/read/field_caps*",
+                "indices:data/read/msearch",
+                "indices:data/read/scroll",
+                "indices:monitor/settings/get",
+                "indices:monitor/stats"
+            )
+            .on(MARVELOUS_SONGS)
+    );
+
+    private static final String BOTH_INDEX_ALIAS = "both-indices";
+    private static final String FORBIDDEN_INDEX_ALIAS = "forbidden-index";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER, LIMITED_USER)
+        .anonymousAuth(false)
+        .doNotFailOnForbidden(true)
+        .build();
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(MARVELOUS_SONGS).id(ID_1).source(SONGS[0].asMap()))
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(MARVELOUS_SONGS).id(ID_2).source(SONGS[1].asMap()))
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(MARVELOUS_SONGS).id(ID_3).source(SONGS[2].asMap()))
+                .actionGet();
+
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(HORRIBLE_SONGS).id(ID_4).source(SONGS[3].asMap()))
+                .actionGet();
+
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(MARVELOUS_SONGS, HORRIBLE_SONGS).alias(BOTH_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(HORRIBLE_SONGS).alias(FORBIDDEN_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+
+        }
+    }
+
+    @Test
+    public void shouldPerformSimpleSearch_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(
+                new String[] { MARVELOUS_SONGS, HORRIBLE_SONGS },
+                QUERY_TITLE_MAGNUM_OPUS
+            );
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThatContainOneSong(searchResponse, ID_1, TITLE_MAGNUM_OPUS);
+        }
+    }
+
+    private static void assertThatContainOneSong(SearchResponse searchResponse, String documentId, String title) {
+        assertThat(searchResponse, isSuccessfulSearchResponse());
+        assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+        assertThat(searchResponse, searchHitsContainDocumentWithId(0, MARVELOUS_SONGS, documentId));
+        assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, title));
+    }
+
+    @Test
+    public void shouldPerformSimpleSearch_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(HORRIBLE_SONGS, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaIndexPattern_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(BOTH_INDEX_PATTERN, QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThatContainOneSong(searchResponse, ID_1, TITLE_MAGNUM_OPUS);
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaIndexPattern_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(HORRIBLE_SONGS, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(BOTH_INDEX_ALIAS, QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThatContainOneSong(searchResponse, ID_1, TITLE_MAGNUM_OPUS);
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(FORBIDDEN_INDEX_ALIAS, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAll_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("_all", QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThatContainOneSong(searchResponse, ID_1, TITLE_MAGNUM_OPUS);
+        }
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAll_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("_all", QUERY_TITLE_POISON);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(0));
+        }
+    }
+
+    @Test
+    public void shouldMGetDocument_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            MultiGetRequest request = new MultiGetRequest().add(BOTH_INDEX_PATTERN, ID_1).add(BOTH_INDEX_PATTERN, ID_4);
+
+            MultiGetResponse response = restHighLevelClient.mget(request, DEFAULT);
+
+            MultiGetItemResponse[] responses = response.getResponses();
+            assertThat(responses, arrayWithSize(2));
+            MultiGetItemResponse firstResult = responses[0];
+            MultiGetItemResponse secondResult = responses[1];
+            assertThat(firstResult.getFailure(), nullValue());
+            assertThat(secondResult.getFailure(), nullValue());
+            assertThat(
+                firstResult.getResponse(),
+                allOf(containDocument(MARVELOUS_SONGS, ID_1), documentContainField(FIELD_TITLE, TITLE_MAGNUM_OPUS))
+            );
+            assertThat(secondResult.getResponse(), containOnlyDocumentId(MARVELOUS_SONGS, ID_4));
+        }
+    }
+
+    @Test
+    public void shouldMGetDocument_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            MultiGetRequest request = new MultiGetRequest().add(HORRIBLE_SONGS, ID_4);
+
+            assertThatThrownBy(() -> restHighLevelClient.mget(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldMSearchDocument_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            MultiSearchRequest request = new MultiSearchRequest();
+            request.add(queryStringQueryRequest(BOTH_INDEX_PATTERN, QUERY_TITLE_MAGNUM_OPUS));
+            request.add(queryStringQueryRequest(BOTH_INDEX_PATTERN, QUERY_TITLE_NEXT_SONG));
+
+            MultiSearchResponse response = restHighLevelClient.msearch(request, DEFAULT);
+
+            MultiSearchResponse.Item[] responses = response.getResponses();
+            assertThat(responses, Matchers.arrayWithSize(2));
+            assertThat(responses[0].getFailure(), nullValue());
+            assertThat(responses[1].getFailure(), nullValue());
+
+            assertThat(responses[0].getResponse(), searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(responses[0].getResponse(), searchHitsContainDocumentWithId(0, MARVELOUS_SONGS, ID_1));
+            assertThat(responses[1].getResponse(), searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_NEXT_SONG));
+            assertThat(responses[1].getResponse(), searchHitsContainDocumentWithId(0, MARVELOUS_SONGS, ID_3));
+        }
+    }
+
+    @Test
+    public void shouldMSearchDocument_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            MultiSearchRequest request = new MultiSearchRequest();
+            request.add(queryStringQueryRequest(FORBIDDEN_INDEX_ALIAS, QUERY_TITLE_POISON));
+
+            assertThatThrownBy(() -> restHighLevelClient.msearch(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldGetFieldCapabilities_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices(MARVELOUS_SONGS, HORRIBLE_SONGS).fields(FIELD_TITLE);
+
+            FieldCapabilitiesResponse response = restHighLevelClient.fieldCaps(request, DEFAULT);
+
+            assertThat(response.get(), aMapWithSize(1));
+            assertThat(response.getIndices(), arrayWithSize(1));
+            assertThat(response.getField(FIELD_TITLE), hasKey("text"));
+            assertThat(response.getIndices(), arrayContainingInAnyOrder(MARVELOUS_SONGS));
+        }
+    }
+
+    @Test
+    public void shouldGetFieldCapabilities_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices(HORRIBLE_SONGS).fields(FIELD_TITLE);
+
+            assertThatThrownBy(() -> restHighLevelClient.fieldCaps(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldScrollOverSearchResults_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchRequestWithScroll(BOTH_INDEX_PATTERN, 2);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containNotEmptyScrollingId());
+
+            SearchScrollRequest scrollRequest = getSearchScrollRequest(searchResponse);
+
+            SearchResponse scrollResponse = restHighLevelClient.scroll(scrollRequest, DEFAULT);
+            assertThat(scrollResponse, isSuccessfulSearchResponse());
+            assertThat(scrollResponse, containNotEmptyScrollingId());
+            assertThat(scrollResponse, numberOfTotalHitsIsEqualTo(3));
+            assertThat(scrollResponse, numberOfHitsInPageIsEqualTo(1));
+        }
+    }
+
+    @Test
+    public void shouldScrollOverSearchResults_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            SearchRequest searchRequest = searchRequestWithScroll(HORRIBLE_SONGS, 2);
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldPerformAggregation_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            final String aggregationName = "averageStars";
+            SearchRequest searchRequest = averageAggregationRequest(BOTH_INDEX_PATTERN, aggregationName, FIELD_STARS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "avg"));
+        }
+    }
+
+    @Test
+    public void shouldPerformAggregation_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            final String aggregationName = "averageStars";
+            SearchRequest searchRequest = averageAggregationRequest(HORRIBLE_SONGS, aggregationName, FIELD_STARS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldPerformStatAggregation_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            final String aggregationName = "statsStars";
+            SearchRequest searchRequest = statsAggregationRequest(BOTH_INDEX_ALIAS, aggregationName, FIELD_STARS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "stats"));
+        }
+    }
+
+    @Test
+    public void shouldPerformStatAggregation_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            final String aggregationName = "statsStars";
+            SearchRequest searchRequest = statsAggregationRequest(HORRIBLE_SONGS, aggregationName, FIELD_STARS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldPerformCatIndices_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_USER)) {
+            Request getIndicesRequest = new Request("GET", "/_cat/indices");
+            // High level client doesn't support _cat/_indices API
+            Response getIndicesResponse = restHighLevelClient.getLowLevelClient().performRequest(getIndicesRequest);
+            List<String> indexes = new BufferedReader(new InputStreamReader(getIndicesResponse.getEntity().getContent())).lines()
+                .collect(Collectors.toList());
+
+            assertThat(indexes.size(), equalTo(1));
+            assertThat(indexes.get(0), containsString("marvelous_songs"));
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java b/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java
new file mode 100644
index 0000000000..4a5460f329
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java
@@ -0,0 +1,814 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+import org.opensearch.action.get.GetRequest;
+import org.opensearch.action.get.GetResponse;
+import org.opensearch.action.get.MultiGetItemResponse;
+import org.opensearch.action.get.MultiGetRequest;
+import org.opensearch.action.get.MultiGetResponse;
+import org.opensearch.action.index.IndexRequest;
+import org.opensearch.action.index.IndexResponse;
+import org.opensearch.action.search.MultiSearchRequest;
+import org.opensearch.action.search.MultiSearchResponse;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.action.search.SearchScrollRequest;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.index.query.QueryBuilder;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.metrics.ParsedAvg;
+import org.opensearch.search.sort.SortOrder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.everyItem;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.nullValue;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.ADD;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.security.Song.ARTIST_FIRST;
+import static org.opensearch.security.Song.ARTIST_STRING;
+import static org.opensearch.security.Song.ARTIST_TWINS;
+import static org.opensearch.security.Song.FIELD_ARTIST;
+import static org.opensearch.security.Song.FIELD_LYRICS;
+import static org.opensearch.security.Song.FIELD_STARS;
+import static org.opensearch.security.Song.FIELD_TITLE;
+import static org.opensearch.security.Song.QUERY_TITLE_NEXT_SONG;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_NEXT_SONG;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.averageAggregationRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.getSearchScrollRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryByIdsRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.searchRequestWithScroll;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.containsExactlyIndices;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.containsFieldWithNameAndType;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.numberOfFieldsIsEqualTo;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.containDocument;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.documentContainField;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.documentDoesNotContainField;
+import static org.opensearch.test.framework.matcher.MultiGetResponseMatchers.isSuccessfulMultiGetResponse;
+import static org.opensearch.test.framework.matcher.MultiGetResponseMatchers.numberOfGetItemResponsesIsEqualTo;
+import static org.opensearch.test.framework.matcher.MultiSearchResponseMatchers.isSuccessfulMultiSearchResponse;
+import static org.opensearch.test.framework.matcher.MultiSearchResponseMatchers.numberOfSearchItemResponsesIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containAggregationWithNameAndType;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containNotEmptyScrollingId;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitContainsFieldWithValue;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitDoesNotContainField;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class FlsAndFieldMaskingTests {
+
+    static final String FIRST_INDEX_ID_SONG_1 = "INDEX_1_S1";
+    static final String FIRST_INDEX_ID_SONG_2 = "INDEX_1_S2";
+    static final String FIRST_INDEX_ID_SONG_3 = "INDEX_1_S3";
+    static final String FIRST_INDEX_ID_SONG_4 = "INDEX_1_S4";
+    static final String SECOND_INDEX_ID_SONG_1 = "INDEX_2_S1";
+    static final String SECOND_INDEX_ID_SONG_2 = "INDEX_2_S2";
+    static final String SECOND_INDEX_ID_SONG_3 = "INDEX_2_S3";
+    static final String SECOND_INDEX_ID_SONG_4 = "INDEX_2_S4";
+
+    static final String INDEX_NAME_SUFFIX = "-test-index";
+    static final String FIRST_INDEX_NAME = "first".concat(INDEX_NAME_SUFFIX);
+    static final String SECOND_INDEX_NAME = "second".concat(INDEX_NAME_SUFFIX);
+    static final String FIRST_INDEX_ALIAS = FIRST_INDEX_NAME.concat("-alias");
+    static final String SECOND_INDEX_ALIAS = SECOND_INDEX_NAME.concat("-alias");
+    static final String FIRST_INDEX_ALIAS_FILTERED_BY_NEXT_SONG_TITLE = FIRST_INDEX_NAME.concat("-filtered-by-next-song-title");
+    static final String FIRST_INDEX_ALIAS_FILTERED_BY_TWINS_ARTIST = FIRST_INDEX_NAME.concat("-filtered-by-twins-artist");
+    static final String FIRST_INDEX_ALIAS_FILTERED_BY_FIRST_ARTIST = FIRST_INDEX_NAME.concat("-filtered-by-first-artist");
+    static final String ALL_INDICES_ALIAS = "_all";
+
+    static final String MASK_VALUE = "*";
+
+    static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    /**
+    * User who is allowed to see all fields on all indices. Values of the title and artist fields should be masked.
+    */
+    static final TestSecurityConfig.User ALL_INDICES_MASKED_TITLE_ARTIST_READER = new TestSecurityConfig.User("masked_artist_title_reader")
+        .roles(
+            new TestSecurityConfig.Role("masked_artist_title_reader").clusterPermissions("cluster_composite_ops_ro")
+                .indexPermissions("read")
+                .maskedFields(
+                    FIELD_TITLE.concat("::/(?<=.{1})./::").concat(MASK_VALUE),
+                    FIELD_ARTIST.concat("::/(?<=.{1})./::").concat(MASK_VALUE)
+                )
+                .on("*")
+        );
+
+    /**
+    * User who is allowed to see all fields on indices {@link #FIRST_INDEX_NAME} and {@link #SECOND_INDEX_NAME}.
+    * <ul>
+    *     <li>values of the artist and lyrics fields should be masked on index {@link #FIRST_INDEX_NAME}</li>
+    *     <li>values of the lyrics field should be masked on index {@link #SECOND_INDEX_NAME}</li>
+    * </ul>
+    */
+    static final TestSecurityConfig.User MASKED_ARTIST_LYRICS_READER = new TestSecurityConfig.User("masked_title_artist_lyrics_reader")
+        .roles(
+            new TestSecurityConfig.Role("masked_title_artist_lyrics_reader").clusterPermissions("cluster_composite_ops_ro")
+                .indexPermissions("read")
+                .maskedFields(
+                    FIELD_ARTIST.concat("::/(?<=.{1})./::").concat(MASK_VALUE),
+                    FIELD_LYRICS.concat("::/(?<=.{1})./::").concat(MASK_VALUE)
+                )
+                .on(FIRST_INDEX_NAME),
+            new TestSecurityConfig.Role("masked_lyrics_reader").clusterPermissions("cluster_composite_ops_ro")
+                .indexPermissions("read")
+                .maskedFields(FIELD_LYRICS.concat("::/(?<=.{1})./::").concat(MASK_VALUE))
+                .on(SECOND_INDEX_NAME)
+        );
+
+    /**
+    * Function that converts field value to value masked with {@link #MASK_VALUE}
+    */
+    static final Function<String, String> VALUE_TO_MASKED_VALUE = value -> value.substring(0, 1)
+        .concat(MASK_VALUE.repeat(value.length() - 1));
+
+    /**
+    * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_STRING}.
+    */
+    static final TestSecurityConfig.User ALL_INDICES_STRING_ARTIST_READER = new TestSecurityConfig.User("string_artist_reader").roles(
+        new TestSecurityConfig.Role("string_artist_reader").clusterPermissions("cluster_composite_ops_ro")
+            .indexPermissions("read")
+            .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_STRING))
+            .on("*")
+    );
+
+    /**
+    * User who is allowed to see documents on index:
+    * <ul>
+    *     <li>{@link #FIRST_INDEX_NAME} where value of the {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_TWINS}</li>
+    *     <li>{@link #SECOND_INDEX_NAME} where value of the {@link Song#FIELD_ARTIST} field matches {@link Song#ARTIST_FIRST}</li>
+    * </ul>
+    */
+    static final TestSecurityConfig.User TWINS_FIRST_ARTIST_READER = new TestSecurityConfig.User("twins_first_artist_reader").roles(
+        new TestSecurityConfig.Role("twins_artist_reader").clusterPermissions("cluster_composite_ops_ro")
+            .indexPermissions("read")
+            .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_TWINS))
+            .on(FIRST_INDEX_NAME),
+        new TestSecurityConfig.Role("first_artist_reader").clusterPermissions("cluster_composite_ops_ro")
+            .indexPermissions("read")
+            .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_FIRST))
+            .on(SECOND_INDEX_NAME)
+    );
+
+    /**
+    * User who is allowed to see documents on all indices where value of the {@link Song#FIELD_STARS} is less than zero.
+    */
+    static final TestSecurityConfig.User ALL_INDICES_STARS_LESS_THAN_ZERO_READER = new TestSecurityConfig.User(
+        "stars_less_than_zero_reader"
+    ).roles(
+        new TestSecurityConfig.Role("stars_less_than_zero_reader").clusterPermissions("cluster_composite_ops_ro")
+            .indexPermissions("read")
+            .dls(String.format("{\"range\":{\"%s\":{\"lt\":%d}}}", FIELD_STARS, 0))
+            .on("*")
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .nodeSettings(
+            Map.of("plugins.security.restapi.roles_enabled", List.of("user_" + ADMIN_USER.getName() + "__" + ALL_ACCESS.getName()))
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(
+            ADMIN_USER,
+            ALL_INDICES_MASKED_TITLE_ARTIST_READER,
+            MASKED_ARTIST_LYRICS_READER,
+            ALL_INDICES_STRING_ARTIST_READER,
+            ALL_INDICES_STARS_LESS_THAN_ZERO_READER,
+            TWINS_FIRST_ARTIST_READER
+        )
+        .build();
+
+    /**
+    * Function that returns id assigned to song with title equal to given title or throws {@link RuntimeException}
+    * when no song matches.
+    */
+    static final BiFunction<Map<String, Song>, String, String> FIND_ID_OF_SONG_WITH_TITLE = (map, title) -> map.entrySet()
+        .stream()
+        .filter(entry -> title.equals(entry.getValue().getTitle()))
+        .findAny()
+        .map(Map.Entry::getKey)
+        .orElseThrow(() -> new RuntimeException("Cannot find id of song with title: " + title));
+
+    /**
+    * Function that returns id assigned to song with artist equal to given artist or throws {@link RuntimeException}
+    * when no song matches.
+    */
+    static final BiFunction<Map<String, Song>, String, String> FIND_ID_OF_SONG_WITH_ARTIST = (map, artist) -> map.entrySet()
+        .stream()
+        .filter(entry -> artist.equals(entry.getValue().getArtist()))
+        .findAny()
+        .map(Map.Entry::getKey)
+        .orElseThrow(() -> new RuntimeException("Cannot find id of song with artist: " + artist));
+
+    static final TreeMap<String, Song> FIRST_INDEX_SONGS_BY_ID = new TreeMap<>() {
+        {
+            put(FIRST_INDEX_ID_SONG_1, SONGS[0]);
+            put(FIRST_INDEX_ID_SONG_2, SONGS[1]);
+            put(FIRST_INDEX_ID_SONG_3, SONGS[2]);
+            put(FIRST_INDEX_ID_SONG_4, SONGS[3]);
+        }
+    };
+
+    static final TreeMap<String, Song> SECOND_INDEX_SONGS_BY_ID = new TreeMap<>() {
+        {
+            put(SECOND_INDEX_ID_SONG_1, SONGS[3]);
+            put(SECOND_INDEX_ID_SONG_2, SONGS[2]);
+            put(SECOND_INDEX_ID_SONG_3, SONGS[1]);
+            put(SECOND_INDEX_ID_SONG_4, SONGS[0]);
+        }
+    };
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            FIRST_INDEX_SONGS_BY_ID.forEach((id, song) -> {
+                client.prepareIndex(FIRST_INDEX_NAME).setId(id).setRefreshPolicy(IMMEDIATE).setSource(song.asMap()).get();
+            });
+
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(FIRST_INDEX_NAME).alias(FIRST_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).index(FIRST_INDEX_NAME)
+                            .alias(FIRST_INDEX_ALIAS_FILTERED_BY_NEXT_SONG_TITLE)
+                            .filter(QueryBuilders.queryStringQuery(QUERY_TITLE_NEXT_SONG))
+                    )
+                )
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).index(FIRST_INDEX_NAME)
+                            .alias(FIRST_INDEX_ALIAS_FILTERED_BY_TWINS_ARTIST)
+                            .filter(QueryBuilders.queryStringQuery(String.format("%s:%s", FIELD_ARTIST, ARTIST_TWINS)))
+                    )
+                )
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).index(FIRST_INDEX_NAME)
+                            .alias(FIRST_INDEX_ALIAS_FILTERED_BY_FIRST_ARTIST)
+                            .filter(QueryBuilders.queryStringQuery(String.format("%s:%s", FIELD_ARTIST, ARTIST_FIRST)))
+                    )
+                )
+                .actionGet();
+
+            SECOND_INDEX_SONGS_BY_ID.forEach((id, song) -> {
+                client.prepareIndex(SECOND_INDEX_NAME).setId(id).setRefreshPolicy(IMMEDIATE).setSource(song.asMap()).get();
+            });
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(SECOND_INDEX_NAME).alias(SECOND_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+        }
+    }
+
+    @Test
+    public void flsEnabledFieldsAreHiddenForNormalUsers() throws IOException {
+        String indexName = "fls_index";
+        String indexAlias = "fls_index_alias";
+        String indexFilteredAlias = "fls_index_filtered_alias";
+        TestSecurityConfig.Role userRole = new TestSecurityConfig.Role("fls_exclude_stars_reader").clusterPermissions(
+            "cluster_composite_ops_ro"
+        ).indexPermissions("read").fls("~".concat(FIELD_STARS)).on("*");
+        TestSecurityConfig.User user = createUserWithRole("fls_user", userRole);
+        List<String> docIds = createIndexWithDocs(indexName, SONGS[0], SONGS[1]);
+        addAliasToIndex(indexName, indexAlias);
+        addAliasToIndex(
+            indexName,
+            indexFilteredAlias,
+            QueryBuilders.queryStringQuery(String.format("%s:%s", FIELD_ARTIST, SONGS[0].getArtist()))
+        );
+
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(user)) {
+            // search
+            SearchResponse searchResponse = restHighLevelClient.search(new SearchRequest(indexName), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            // search with index pattern
+            searchResponse = restHighLevelClient.search(new SearchRequest("*".concat(indexName)), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            // search via alias
+            searchResponse = restHighLevelClient.search(new SearchRequest(indexAlias), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            // search via filtered alias
+            searchResponse = restHighLevelClient.search(new SearchRequest(indexFilteredAlias), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            // search via all indices alias
+            searchResponse = restHighLevelClient.search(new SearchRequest(ALL_INDICES_ALIAS), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            // scroll
+            searchResponse = restHighLevelClient.search(searchRequestWithScroll(indexName, 1), DEFAULT);
+
+            assertSearchHitsDoNotContainField(searchResponse, FIELD_STARS);
+
+            SearchScrollRequest scrollRequest = getSearchScrollRequest(searchResponse);
+            SearchResponse scrollResponse = restHighLevelClient.scroll(scrollRequest, DEFAULT);
+
+            assertSearchHitsDoNotContainField(scrollResponse, FIELD_STARS);
+
+            // aggregate data and compute avg
+            String aggregationName = "averageStars";
+            searchResponse = restHighLevelClient.search(averageAggregationRequest(indexName, aggregationName, FIELD_STARS), DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "avg"));
+            Aggregation actualAggregation = searchResponse.getAggregations().get(aggregationName);
+            assertThat(actualAggregation, instanceOf(ParsedAvg.class));
+            assertThat(((ParsedAvg) actualAggregation).getValue(), is(Double.POSITIVE_INFINITY)); // user cannot see the STARS field
+
+            // get document
+            GetResponse getResponse = restHighLevelClient.get(new GetRequest(indexName, docIds.get(0)), DEFAULT);
+
+            assertThat(getResponse, documentDoesNotContainField(FIELD_STARS));
+
+            // multi get
+            for (String index : List.of(indexName, indexAlias)) {
+                MultiGetRequest multiGetRequest = new MultiGetRequest();
+                docIds.forEach(id -> multiGetRequest.add(new MultiGetRequest.Item(index, id)));
+
+                MultiGetResponse multiGetResponse = restHighLevelClient.mget(multiGetRequest, DEFAULT);
+
+                List<GetResponse> getResponses = Arrays.stream(multiGetResponse.getResponses())
+                    .map(MultiGetItemResponse::getResponse)
+                    .collect(Collectors.toList());
+                assertThat(getResponses, everyItem(documentDoesNotContainField(FIELD_STARS)));
+            }
+
+            // multi search
+            for (String index : List.of(indexName, indexAlias)) {
+                MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
+                docIds.forEach(id -> multiSearchRequest.add(queryByIdsRequest(index, id)));
+                MultiSearchResponse multiSearchResponse = restHighLevelClient.msearch(multiSearchRequest, DEFAULT);
+
+                assertThat(multiSearchResponse, isSuccessfulMultiSearchResponse());
+                List<MultiSearchResponse.Item> itemResponses = List.of(multiSearchResponse.getResponses());
+                itemResponses.forEach(item -> assertSearchHitsDoNotContainField(item.getResponse(), FIELD_STARS));
+            }
+
+            // field capabilities
+            FieldCapabilitiesResponse fieldCapsResponse = restHighLevelClient.fieldCaps(
+                new FieldCapabilitiesRequest().indices(indexName).fields(FIELD_TITLE, FIELD_STARS),
+                DEFAULT
+            );
+            assertThat(fieldCapsResponse.getField(FIELD_STARS), nullValue());
+        }
+    }
+
+    private static List<String> createIndexWithDocs(String indexName, Song... songs) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            return Stream.of(songs).map(song -> {
+                IndexResponse response = client.index(new IndexRequest(indexName).setRefreshPolicy(IMMEDIATE).source(song.asMap()))
+                    .actionGet();
+                return response.getId();
+            }).collect(Collectors.toList());
+        }
+    }
+
+    private static void addAliasToIndex(String indexName, String alias) {
+        addAliasToIndex(indexName, alias, QueryBuilders.matchAllQuery());
+    }
+
+    private static void addAliasToIndex(String indexName, String alias, QueryBuilder filterQuery) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(indexName).alias(alias).filter(filterQuery)
+                    )
+                )
+                .actionGet();
+        }
+    }
+
+    private static TestSecurityConfig.User createUserWithRole(String userName, TestSecurityConfig.Role role) {
+        TestSecurityConfig.User user = new TestSecurityConfig.User(userName);
+        try (TestRestClient client = cluster.getRestClient(ADMIN_USER)) {
+            client.createRole(role.getName(), role).assertStatusCode(201);
+            client.createUser(user.getName(), user).assertStatusCode(201);
+            client.assignRoleToUser(user.getName(), role.getName()).assertStatusCode(200);
+        }
+        return user;
+    }
+
+    private static void assertSearchHitsDoNotContainField(SearchResponse response, String excludedField) {
+        assertThat(response, isSuccessfulSearchResponse());
+        assertThat(response.getHits().getHits().length, greaterThan(0));
+        IntStream.range(0, response.getHits().getHits().length)
+            .boxed()
+            .forEach(index -> assertThat(response, searchHitDoesNotContainField(index, excludedField)));
+    }
+
+    @Test
+    public void searchForDocuments() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIRST_INDEX_ID_SONG_1;
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = queryByIdsRequest(FIRST_INDEX_NAME, songId);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+
+            songId = SECOND_INDEX_ID_SONG_2;
+            song = SECOND_INDEX_SONGS_BY_ID.get(songId);
+
+            searchRequest = queryByIdsRequest(SECOND_INDEX_NAME, songId);
+            searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SECOND_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, song.getArtist()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void searchForDocumentsWithIndexPattern() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIRST_INDEX_ID_SONG_2;
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = queryByIdsRequest("*".concat(FIRST_INDEX_NAME), songId);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+
+            songId = SECOND_INDEX_ID_SONG_3;
+            song = SECOND_INDEX_SONGS_BY_ID.get(songId);
+
+            searchRequest = queryByIdsRequest("*".concat(SECOND_INDEX_NAME), songId);
+            searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SECOND_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, song.getArtist()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void searchForDocumentsViaAlias() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIRST_INDEX_ID_SONG_3;
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = queryByIdsRequest(FIRST_INDEX_ALIAS, songId);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+
+            songId = SECOND_INDEX_ID_SONG_4;
+            song = SECOND_INDEX_SONGS_BY_ID.get(songId);
+
+            searchRequest = queryByIdsRequest("*".concat(SECOND_INDEX_ALIAS), songId);
+            searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SECOND_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, song.getArtist()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void searchForDocumentsViaFilteredAlias() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIND_ID_OF_SONG_WITH_TITLE.apply(FIRST_INDEX_SONGS_BY_ID, TITLE_NEXT_SONG);
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = new SearchRequest(FIRST_INDEX_ALIAS_FILTERED_BY_NEXT_SONG_TITLE);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void searchForDocumentsViaAllIndicesAlias() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ALL_INDICES_MASKED_TITLE_ARTIST_READER)) {
+            String songId = FIRST_INDEX_ID_SONG_4;
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = queryByIdsRequest(ALL_INDICES_ALIAS, songId);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, VALUE_TO_MASKED_VALUE.apply(song.getTitle())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, song.getLyrics()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+
+            songId = SECOND_INDEX_ID_SONG_1;
+            song = SECOND_INDEX_SONGS_BY_ID.get(songId);
+
+            searchRequest = queryByIdsRequest(ALL_INDICES_ALIAS, songId);
+            searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SECOND_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, VALUE_TO_MASKED_VALUE.apply(song.getTitle())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, song.getLyrics()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void scrollOverSearchResults() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIRST_INDEX_SONGS_BY_ID.firstKey();
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+
+            SearchRequest searchRequest = searchRequestWithScroll(FIRST_INDEX_NAME, 1);
+            searchRequest.source().sort("_id", SortOrder.ASC);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containNotEmptyScrollingId());
+
+            SearchScrollRequest scrollRequest = getSearchScrollRequest(searchResponse);
+
+            SearchResponse scrollResponse = restHighLevelClient.scroll(scrollRequest, DEFAULT);
+            assertThat(scrollResponse, isSuccessfulSearchResponse());
+            assertThat(scrollResponse, containNotEmptyScrollingId());
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, songId));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, song.getTitle()));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void aggregateDataAndComputeAverage() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String aggregationName = "averageStars";
+            Double expectedValue = FIRST_INDEX_SONGS_BY_ID.values()
+                .stream()
+                .mapToDouble(Song::getStars)
+                .average()
+                .orElseThrow(() -> new RuntimeException("Cannot compute average stars - list of docs is empty"));
+            SearchRequest searchRequest = averageAggregationRequest(FIRST_INDEX_NAME, aggregationName, FIELD_STARS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "avg"));
+            Aggregation actualAggregation = searchResponse.getAggregations().get(aggregationName);
+            assertThat(actualAggregation, instanceOf(ParsedAvg.class));
+            assertThat(((ParsedAvg) actualAggregation).getValue(), is(expectedValue));
+        }
+    }
+
+    @Test
+    public void getDocument() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            String songId = FIRST_INDEX_ID_SONG_4;
+            Song song = FIRST_INDEX_SONGS_BY_ID.get(songId);
+            GetResponse response = restHighLevelClient.get(new GetRequest(FIRST_INDEX_NAME, songId), DEFAULT);
+
+            assertThat(response, containDocument(FIRST_INDEX_NAME, songId));
+            assertThat(response, documentContainField(FIELD_TITLE, song.getTitle()));
+            assertThat(response, documentContainField(FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(response, documentContainField(FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(song.getArtist())));
+            assertThat(response, documentContainField(FIELD_STARS, song.getStars()));
+
+            songId = SECOND_INDEX_ID_SONG_1;
+            song = SECOND_INDEX_SONGS_BY_ID.get(songId);
+            response = restHighLevelClient.get(new GetRequest(SECOND_INDEX_NAME, songId), DEFAULT);
+
+            assertThat(response, containDocument(SECOND_INDEX_NAME, songId));
+            assertThat(response, documentContainField(FIELD_TITLE, song.getTitle()));
+            assertThat(response, documentContainField(FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(song.getLyrics())));
+            assertThat(response, documentContainField(FIELD_ARTIST, song.getArtist()));
+            assertThat(response, documentContainField(FIELD_STARS, song.getStars()));
+        }
+    }
+
+    @Test
+    public void multiGetDocuments() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            List<List<String>> indicesToCheck = List.of(
+                List.of(FIRST_INDEX_NAME, SECOND_INDEX_NAME),
+                List.of(FIRST_INDEX_ALIAS, SECOND_INDEX_ALIAS)
+            );
+            String firstSongId = FIRST_INDEX_ID_SONG_1;
+            Song firstSong = FIRST_INDEX_SONGS_BY_ID.get(firstSongId);
+            String secondSongId = SECOND_INDEX_ID_SONG_2;
+            Song secondSong = SECOND_INDEX_SONGS_BY_ID.get(secondSongId);
+
+            for (List<String> indices : indicesToCheck) {
+                MultiGetRequest request = new MultiGetRequest();
+                request.add(new MultiGetRequest.Item(indices.get(0), firstSongId));
+                request.add(new MultiGetRequest.Item(indices.get(1), secondSongId));
+                MultiGetResponse response = restHighLevelClient.mget(request, DEFAULT);
+
+                assertThat(response, isSuccessfulMultiGetResponse());
+                assertThat(response, numberOfGetItemResponsesIsEqualTo(2));
+
+                MultiGetItemResponse[] responses = response.getResponses();
+                assertThat(
+                    responses[0].getResponse(),
+                    allOf(
+                        containDocument(FIRST_INDEX_NAME, FIRST_INDEX_ID_SONG_1),
+                        documentContainField(FIELD_TITLE, firstSong.getTitle()),
+                        documentContainField(FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(firstSong.getLyrics())),
+                        documentContainField(FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(firstSong.getArtist())),
+                        documentContainField(FIELD_STARS, firstSong.getStars())
+                    )
+                );
+                assertThat(
+                    responses[1].getResponse(),
+                    allOf(
+                        containDocument(SECOND_INDEX_NAME, secondSongId),
+                        documentContainField(FIELD_TITLE, secondSong.getTitle()),
+                        documentContainField(FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(secondSong.getLyrics())),
+                        documentContainField(FIELD_ARTIST, secondSong.getArtist()),
+                        documentContainField(FIELD_STARS, secondSong.getStars())
+                    )
+                );
+            }
+        }
+    }
+
+    @Test
+    public void multiSearchDocuments() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            List<List<String>> indicesToCheck = List.of(
+                List.of(FIRST_INDEX_NAME, SECOND_INDEX_NAME),
+                List.of(FIRST_INDEX_ALIAS, SECOND_INDEX_ALIAS)
+            );
+            String firstSongId = FIRST_INDEX_ID_SONG_3;
+            Song firstSong = FIRST_INDEX_SONGS_BY_ID.get(firstSongId);
+            String secondSongId = SECOND_INDEX_ID_SONG_4;
+            Song secondSong = SECOND_INDEX_SONGS_BY_ID.get(secondSongId);
+
+            for (List<String> indices : indicesToCheck) {
+                MultiSearchRequest request = new MultiSearchRequest();
+                request.add(queryByIdsRequest(indices.get(0), firstSongId));
+                request.add(queryByIdsRequest(indices.get(1), secondSongId));
+                MultiSearchResponse response = restHighLevelClient.msearch(request, DEFAULT);
+
+                assertThat(response, isSuccessfulMultiSearchResponse());
+                assertThat(response, numberOfSearchItemResponsesIsEqualTo(2));
+
+                MultiSearchResponse.Item[] responses = response.getResponses();
+
+                assertThat(
+                    responses[0].getResponse(),
+                    allOf(
+                        searchHitsContainDocumentWithId(0, FIRST_INDEX_NAME, firstSongId),
+                        searchHitContainsFieldWithValue(0, FIELD_TITLE, firstSong.getTitle()),
+                        searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(firstSong.getLyrics())),
+                        searchHitContainsFieldWithValue(0, FIELD_ARTIST, VALUE_TO_MASKED_VALUE.apply(firstSong.getArtist())),
+                        searchHitContainsFieldWithValue(0, FIELD_STARS, firstSong.getStars())
+                    )
+                );
+                assertThat(
+                    responses[1].getResponse(),
+                    allOf(
+                        searchHitsContainDocumentWithId(0, SECOND_INDEX_NAME, secondSongId),
+                        searchHitContainsFieldWithValue(0, FIELD_TITLE, secondSong.getTitle()),
+                        searchHitContainsFieldWithValue(0, FIELD_LYRICS, VALUE_TO_MASKED_VALUE.apply(secondSong.getLyrics())),
+                        searchHitContainsFieldWithValue(0, FIELD_ARTIST, secondSong.getArtist()),
+                        searchHitContainsFieldWithValue(0, FIELD_STARS, secondSong.getStars())
+                    )
+                );
+            }
+        }
+    }
+
+    @Test
+    public void getFieldCapabilities() throws IOException {
+        // FIELD MASKING
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(MASKED_ARTIST_LYRICS_READER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices(FIRST_INDEX_NAME)
+                .fields(FIELD_ARTIST, FIELD_TITLE, FIELD_LYRICS);
+            FieldCapabilitiesResponse response = restHighLevelClient.fieldCaps(request, DEFAULT);
+
+            assertThat(response, containsExactlyIndices(FIRST_INDEX_NAME));
+            assertThat(response, numberOfFieldsIsEqualTo(3));
+            assertThat(response, containsFieldWithNameAndType(FIELD_ARTIST, "text"));
+            assertThat(response, containsFieldWithNameAndType(FIELD_TITLE, "text"));
+            assertThat(response, containsFieldWithNameAndType(FIELD_LYRICS, "text"));
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/IndexOperationsHelper.java b/src/integrationTest/java/org/opensearch/security/IndexOperationsHelper.java
new file mode 100644
index 0000000000..7482558c5b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/IndexOperationsHelper.java
@@ -0,0 +1,66 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.util.Map;
+
+import org.opensearch.action.admin.indices.close.CloseIndexRequest;
+import org.opensearch.action.admin.indices.close.CloseIndexResponse;
+import org.opensearch.action.admin.indices.create.CreateIndexRequest;
+import org.opensearch.action.admin.indices.create.CreateIndexResponse;
+import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexExists;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexStateIsEqualTo;
+
+public class IndexOperationsHelper {
+
+    public static void createIndex(LocalCluster cluster, String indexName) {
+        createIndex(cluster, indexName, Settings.EMPTY);
+    }
+
+    public static void createIndex(LocalCluster cluster, String indexName, Settings settings) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            CreateIndexResponse createIndexResponse = client.admin()
+                .indices()
+                .create(new CreateIndexRequest(indexName).settings(settings))
+                .actionGet();
+
+            assertThat(createIndexResponse.isAcknowledged(), is(true));
+            assertThat(createIndexResponse.isShardsAcknowledged(), is(true));
+            assertThat(cluster, indexExists(indexName));
+        }
+    }
+
+    public static void closeIndex(LocalCluster cluster, String indexName) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indexName);
+            CloseIndexResponse response = client.admin().indices().close(closeIndexRequest).actionGet();
+
+            assertThat(response.isAcknowledged(), is(true));
+            assertThat(response.isShardsAcknowledged(), is(true));
+            assertThat(cluster, indexStateIsEqualTo(indexName, IndexMetadata.State.CLOSE));
+        }
+    }
+
+    public static void createMapping(LocalCluster cluster, String indexName, Map<String, Object> indexMapping) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            var response = client.admin().indices().putMapping(new PutMappingRequest(indexName).source(indexMapping)).actionGet();
+
+            assertThat(response.isAcknowledged(), is(true));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/IpBruteForceAttacksPreventionTests.java b/src/integrationTest/java/org/opensearch/security/IpBruteForceAttacksPreventionTests.java
new file mode 100644
index 0000000000..c881fa451e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/IpBruteForceAttacksPreventionTests.java
@@ -0,0 +1,166 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.util.concurrent.TimeUnit;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.AuthFailureListeners;
+import org.opensearch.test.framework.RateLimiting;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+import org.opensearch.test.framework.cluster.TestRestClientConfiguration;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.apache.http.HttpStatus.SC_OK;
+import static org.apache.http.HttpStatus.SC_UNAUTHORIZED;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL_WITHOUT_CHALLENGE;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.TestRestClientConfiguration.userWithSourceIp;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class IpBruteForceAttacksPreventionTests {
+    private static final User USER_1 = new User("simple-user-1").roles(ALL_ACCESS);
+    private static final User USER_2 = new User("simple-user-2").roles(ALL_ACCESS);
+
+    public static final int ALLOWED_TRIES = 3;
+    public static final int TIME_WINDOW_SECONDS = 3;
+
+    public static final String CLIENT_IP_2 = "127.0.0.2";
+    public static final String CLIENT_IP_3 = "127.0.0.3";
+    public static final String CLIENT_IP_4 = "127.0.0.4";
+    public static final String CLIENT_IP_5 = "127.0.0.5";
+    public static final String CLIENT_IP_6 = "127.0.0.6";
+    public static final String CLIENT_IP_7 = "127.0.0.7";
+    public static final String CLIENT_IP_8 = "127.0.0.8";
+    public static final String CLIENT_IP_9 = "127.0.0.9";
+
+    private static final AuthFailureListeners listener = new AuthFailureListeners().addRateLimit(
+        new RateLimiting("internal_authentication_backend_limiting").type("ip")
+            .allowedTries(ALLOWED_TRIES)
+            .timeWindowSeconds(TIME_WINDOW_SECONDS)
+            .blockExpirySeconds(2)
+            .maxBlockedClients(500)
+            .maxTrackedClients(500)
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authFailureListeners(listener)
+        .authc(AUTHC_HTTPBASIC_INTERNAL_WITHOUT_CHALLENGE)
+        .users(USER_1, USER_2)
+        .build();
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("org.opensearch.security.auth.BackendRegistry");
+
+    @Test
+    public void shouldAuthenticateUserWhenBlockadeIsNotActive() {
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_1, CLIENT_IP_2))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldBlockIpAddress() {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_3, USER_2, ALLOWED_TRIES);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_2, CLIENT_IP_3))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+            logsRule.assertThatContain("Rejecting REST request because of blocked address: /" + CLIENT_IP_3);
+        }
+    }
+
+    @Test
+    public void shouldBlockUsersWhoUseTheSameIpAddress() {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_4, USER_1, ALLOWED_TRIES);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_2, CLIENT_IP_4))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+            logsRule.assertThatContain("Rejecting REST request because of blocked address: /" + CLIENT_IP_4);
+        }
+    }
+
+    @Test
+    public void testUserShouldBeAbleToAuthenticateFromAnotherNotBlockedIpAddress() {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_5, USER_1, ALLOWED_TRIES);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_1, CLIENT_IP_6))) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldNotBlockIpWhenFailureAuthenticationCountIsLessThanAllowedTries() {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_7, USER_1, ALLOWED_TRIES - 1);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_1, CLIENT_IP_7))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldBlockIpWhenFailureAuthenticationCountIsGreaterThanAllowedTries() {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_8, USER_1, ALLOWED_TRIES * 2);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_1, CLIENT_IP_8))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+            logsRule.assertThatContain("Rejecting REST request because of blocked address: /" + CLIENT_IP_8);
+        }
+    }
+
+    @Test
+    public void shouldReleaseIpAddressLock() throws InterruptedException {
+        authenticateUserWithIncorrectPassword(CLIENT_IP_9, USER_1, ALLOWED_TRIES * 2);
+        TimeUnit.SECONDS.sleep(TIME_WINDOW_SECONDS);
+        try (TestRestClient client = cluster.createGenericClientRestClient(userWithSourceIp(USER_1, CLIENT_IP_9))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+            logsRule.assertThatContain("Rejecting REST request because of blocked address: /" + CLIENT_IP_9);
+        }
+    }
+
+    private static void authenticateUserWithIncorrectPassword(String sourceIpAddress, User user, int numberOfRequests) {
+        var clientConfiguration = new TestRestClientConfiguration().username(user.getName())
+            .password("incorrect password")
+            .sourceInetAddress(sourceIpAddress);
+        try (TestRestClient client = cluster.createGenericClientRestClient(clientConfiguration)) {
+            for (int i = 0; i < numberOfRequests; ++i) {
+                HttpResponse response = client.getAuthInfo();
+
+                response.assertStatusCode(SC_UNAUTHORIZED);
+            }
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/PointInTimeOperationTest.java b/src/integrationTest/java/org/opensearch/security/PointInTimeOperationTest.java
new file mode 100644
index 0000000000..6b6b52660f
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/PointInTimeOperationTest.java
@@ -0,0 +1,426 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.commons.lang3.tuple.Pair;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.OpenSearchStatusException;
+import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.opensearch.action.index.IndexRequest;
+import org.opensearch.action.search.CreatePitRequest;
+import org.opensearch.action.search.CreatePitResponse;
+import org.opensearch.action.search.DeletePitRequest;
+import org.opensearch.action.search.DeletePitResponse;
+import org.opensearch.action.search.GetAllPitNodesResponse;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.search.builder.PointInTimeBuilder;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.ADD;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.core.rest.RestStatus.OK;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.PitResponseMatchers.deleteResponseContainsExactlyPitWithIds;
+import static org.opensearch.test.framework.matcher.PitResponseMatchers.getAllResponseContainsExactlyPitWithIds;
+import static org.opensearch.test.framework.matcher.PitResponseMatchers.isSuccessfulCreatePitResponse;
+import static org.opensearch.test.framework.matcher.PitResponseMatchers.isSuccessfulDeletePitResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentsInAnyOrder;
+
+@RunWith(RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class PointInTimeOperationTest {
+
+    private static final String FIRST_SONG_INDEX = "song-index-1";
+    private static final String FIRST_INDEX_ALIAS = "song-index-1-alias";
+    private static final String SECOND_SONG_INDEX = "song-index-2";
+    private static final String SECOND_INDEX_ALIAS = "song-index-2-alias";
+
+    private static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    /**
+    * User who is allowed to perform PIT operations only on the {@link #FIRST_SONG_INDEX}
+    */
+    private static final TestSecurityConfig.User LIMITED_POINT_IN_TIME_USER = new TestSecurityConfig.User("limited_point_in_time_user")
+        .roles(
+            new TestSecurityConfig.Role("limited_point_in_time_user").indexPermissions(
+                "indices:data/read/point_in_time/create",
+                "indices:data/read/point_in_time/delete",
+                "indices:data/read/search",
+                "indices:data/read/point_in_time/readall", // anyway user needs the all indexes permission (*) to find all pits
+                "indices:monitor/point_in_time/segments" // anyway user needs the all indexes permission (*) to list all pits segments
+            ).on(FIRST_SONG_INDEX)
+        );
+    /**
+    * User who is allowed to perform PIT operations on all indices
+    */
+    private static final TestSecurityConfig.User POINT_IN_TIME_USER = new TestSecurityConfig.User("point_in_time_user").roles(
+        new TestSecurityConfig.Role("point_in_time_user").indexPermissions(
+            "indices:data/read/point_in_time/create",
+            "indices:data/read/point_in_time/delete",
+            "indices:data/read/search",
+            "indices:data/read/point_in_time/readall",
+            "indices:monitor/point_in_time/segments"
+        ).on("*")
+    );
+
+    private static final String ID_1 = "1";
+    private static final String ID_2 = "2";
+    private static final String ID_3 = "3";
+    private static final String ID_4 = "4";
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(FIRST_SONG_INDEX).id(ID_1).source(SONGS[0].asMap()))
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(FIRST_SONG_INDEX).id(ID_2).source(SONGS[1].asMap()))
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(FIRST_SONG_INDEX).id(ID_3).source(SONGS[2].asMap()))
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(FIRST_SONG_INDEX).alias(FIRST_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(SECOND_SONG_INDEX).id(ID_4).source(SONGS[3].asMap()))
+                .actionGet();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new IndicesAliasesRequest.AliasActions(ADD).indices(SECOND_SONG_INDEX).alias(SECOND_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+        }
+    }
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER, LIMITED_POINT_IN_TIME_USER, POINT_IN_TIME_USER)
+        .build();
+
+    @Test
+    public void createPit_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            CreatePitRequest createPitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(30), false, FIRST_SONG_INDEX);
+
+            CreatePitResponse createPitResponse = restHighLevelClient.createPit(createPitRequest, DEFAULT);
+
+            assertThat(createPitResponse, isSuccessfulCreatePitResponse());
+        }
+    }
+
+    @Test
+    public void createPitWithIndexAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            CreatePitRequest createPitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(30), false, FIRST_INDEX_ALIAS);
+
+            CreatePitResponse createPitResponse = restHighLevelClient.createPit(createPitRequest, DEFAULT);
+
+            assertThat(createPitResponse, isSuccessfulCreatePitResponse());
+        }
+    }
+
+    @Test
+    public void createPit_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            CreatePitRequest createPitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(30), false, SECOND_SONG_INDEX);
+
+            assertThatThrownBy(() -> restHighLevelClient.createPit(createPitRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void createPitWithIndexAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            CreatePitRequest createPitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(30), false, SECOND_INDEX_ALIAS);
+
+            assertThatThrownBy(() -> restHighLevelClient.createPit(createPitRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void listAllPits_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(POINT_IN_TIME_USER)) {
+            cleanUpPits();
+            String firstIndexPit = createPitForIndices(FIRST_SONG_INDEX);
+            String secondIndexPit = createPitForIndices(SECOND_SONG_INDEX);
+
+            GetAllPitNodesResponse getAllPitsResponse = restHighLevelClient.getAllPits(DEFAULT);
+
+            assertThat(getAllPitsResponse, getAllResponseContainsExactlyPitWithIds(firstIndexPit, secondIndexPit));
+        }
+    }
+
+    @Test
+    public void listAllPits_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            assertThatThrownBy(() -> restHighLevelClient.getAllPits(DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void deletePit_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_SONG_INDEX);
+
+            DeletePitResponse deletePitResponse = restHighLevelClient.deletePit(new DeletePitRequest(existingPitId), DEFAULT);
+            assertThat(deletePitResponse, isSuccessfulDeletePitResponse());
+            assertThat(deletePitResponse, deleteResponseContainsExactlyPitWithIds(existingPitId));
+        }
+    }
+
+    @Test
+    public void deletePitCreatedWithIndexAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_INDEX_ALIAS);
+
+            DeletePitResponse deletePitResponse = restHighLevelClient.deletePit(new DeletePitRequest(existingPitId), DEFAULT);
+            assertThat(deletePitResponse, isSuccessfulDeletePitResponse());
+            assertThat(deletePitResponse, deleteResponseContainsExactlyPitWithIds(existingPitId));
+        }
+    }
+
+    @Test
+    public void deletePit_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_SONG_INDEX);
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.deletePit(new DeletePitRequest(existingPitId), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    public void deletePitCreatedWithIndexAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_INDEX_ALIAS);
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.deletePit(new DeletePitRequest(existingPitId), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    public void deleteAllPits_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(POINT_IN_TIME_USER)) {
+            cleanUpPits();
+            String firstIndexPit = createPitForIndices(FIRST_SONG_INDEX);
+            String secondIndexPit = createPitForIndices(SECOND_SONG_INDEX);
+
+            DeletePitResponse deletePitResponse = restHighLevelClient.deleteAllPits(DEFAULT);
+            assertThat(deletePitResponse, isSuccessfulDeletePitResponse());
+            assertThat(deletePitResponse, deleteResponseContainsExactlyPitWithIds(firstIndexPit, secondIndexPit));
+        }
+    }
+
+    @Test
+    public void deleteAllPits_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            assertThatThrownBy(() -> restHighLevelClient.deleteAllPits(DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void searchWithPit_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_SONG_INDEX);
+
+            SearchRequest searchRequest = new SearchRequest();
+            searchRequest.source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(existingPitId)));
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(
+                searchResponse,
+                searchHitsContainDocumentsInAnyOrder(
+                    Pair.of(FIRST_SONG_INDEX, ID_1),
+                    Pair.of(FIRST_SONG_INDEX, ID_2),
+                    Pair.of(FIRST_SONG_INDEX, ID_3)
+                )
+            );
+        }
+    }
+
+    @Test
+    public void searchWithPitCreatedWithIndexAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_INDEX_ALIAS);
+
+            SearchRequest searchRequest = new SearchRequest();
+            searchRequest.source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(existingPitId)));
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(
+                searchResponse,
+                searchHitsContainDocumentsInAnyOrder(
+                    Pair.of(FIRST_SONG_INDEX, ID_1),
+                    Pair.of(FIRST_SONG_INDEX, ID_2),
+                    Pair.of(FIRST_SONG_INDEX, ID_3)
+                )
+            );
+        }
+    }
+
+    @Test
+    public void searchWithPit_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_SONG_INDEX);
+
+            SearchRequest searchRequest = new SearchRequest();
+            searchRequest.source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(existingPitId)));
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void searchWithPitCreatedWithIndexAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_INDEX_ALIAS);
+
+            SearchRequest searchRequest = new SearchRequest();
+            searchRequest.source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(existingPitId)));
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void listPitSegments_positive() throws IOException {
+        try (TestRestClient restClient = cluster.getRestClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_SONG_INDEX);
+            String body = String.format("{\"pit_id\":[\"%s\"]}", existingPitId);
+            HttpResponse response = restClient.putJson("/_cat/pit_segments", body);
+
+            response.assertStatusCode(OK.getStatus());
+        }
+    }
+
+    @Test
+    public void listPitSegmentsCreatedWithIndexAlias_positive() throws IOException {
+        try (TestRestClient restClient = cluster.getRestClient(POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(FIRST_INDEX_ALIAS);
+            String body = String.format("{\"pit_id\":[\"%s\"]}", existingPitId);
+            HttpResponse response = restClient.putJson("/_cat/pit_segments", body);
+
+            response.assertStatusCode(OK.getStatus());
+        }
+    }
+
+    @Test
+    public void listPitSegments_negative() throws IOException {
+        try (TestRestClient restClient = cluster.getRestClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_SONG_INDEX);
+            String body = String.format("{\"pit_id\":[\"%s\"]}", existingPitId);
+            HttpResponse response = restClient.putJson("/_cat/pit_segments", body);
+
+            response.assertStatusCode(FORBIDDEN.getStatus());
+        }
+    }
+
+    @Test
+    public void listPitSegmentsCreatedWithIndexAlias_negative() throws IOException {
+        try (TestRestClient restClient = cluster.getRestClient(LIMITED_POINT_IN_TIME_USER)) {
+            String existingPitId = createPitForIndices(SECOND_INDEX_ALIAS);
+            String body = String.format("{\"pit_id\":[\"%s\"]}", existingPitId);
+            HttpResponse response = restClient.putJson("/_cat/pit_segments", body);
+
+            response.assertStatusCode(FORBIDDEN.getStatus());
+        }
+    }
+
+    @Test
+    public void listAllPitSegments_positive() {
+        try (TestRestClient restClient = cluster.getRestClient(POINT_IN_TIME_USER)) {
+            HttpResponse response = restClient.get("/_cat/pit_segments/_all");
+
+            response.assertStatusCode(OK.getStatus());
+        }
+    }
+
+    @Test
+    public void listAllPitSegments_negative() {
+        try (TestRestClient restClient = cluster.getRestClient(LIMITED_POINT_IN_TIME_USER)) {
+            HttpResponse response = restClient.get("/_cat/pit_segments/_all");
+
+            response.assertStatusCode(FORBIDDEN.getStatus());
+        }
+    }
+
+    /**
+    * Creates PIT for given indices. Returns PIT id.
+    */
+    private String createPitForIndices(String... indices) throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            CreatePitRequest createPitRequest = new CreatePitRequest(TimeValue.timeValueMinutes(30), false, indices);
+
+            CreatePitResponse createPitResponse = restHighLevelClient.createPit(createPitRequest, DEFAULT);
+
+            assertThat(createPitResponse, isSuccessfulCreatePitResponse());
+            return createPitResponse.getId();
+        }
+    }
+
+    /**
+    * Deletes all PITs.
+    */
+    public void cleanUpPits() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            try {
+                restHighLevelClient.deleteAllPits(DEFAULT);
+            } catch (OpenSearchStatusException ex) {
+                if (ex.status() != RestStatus.NOT_FOUND) {
+                    throw ex;
+                }
+                // tried to remove pits but no pit exists
+            }
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SearchOperationTest.java b/src/integrationTest/java/org/opensearch/security/SearchOperationTest.java
new file mode 100644
index 0000000000..f16d40e905
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SearchOperationTest.java
@@ -0,0 +1,2719 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.google.common.base.Stopwatch;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.awaitility.Awaitility;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
+import org.opensearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
+import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.opensearch.action.admin.indices.alias.Alias;
+import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
+import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
+import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
+import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
+import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.opensearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.opensearch.action.admin.indices.open.OpenIndexRequest;
+import org.opensearch.action.admin.indices.open.OpenIndexResponse;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest;
+import org.opensearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
+import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
+import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
+import org.opensearch.action.bulk.BulkRequest;
+import org.opensearch.action.bulk.BulkResponse;
+import org.opensearch.action.delete.DeleteRequest;
+import org.opensearch.action.delete.DeleteResponse;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesRequest;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+import org.opensearch.action.get.GetRequest;
+import org.opensearch.action.get.GetResponse;
+import org.opensearch.action.get.MultiGetItemResponse;
+import org.opensearch.action.get.MultiGetRequest;
+import org.opensearch.action.get.MultiGetRequest.Item;
+import org.opensearch.action.get.MultiGetResponse;
+import org.opensearch.action.index.IndexRequest;
+import org.opensearch.action.search.MultiSearchRequest;
+import org.opensearch.action.search.MultiSearchResponse;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.action.search.SearchScrollRequest;
+import org.opensearch.action.update.UpdateRequest;
+import org.opensearch.action.update.UpdateResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.ClusterAdminClient;
+import org.opensearch.client.IndicesAdminClient;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.client.core.CountRequest;
+import org.opensearch.client.indices.CloseIndexRequest;
+import org.opensearch.client.indices.CloseIndexResponse;
+import org.opensearch.client.indices.CreateIndexRequest;
+import org.opensearch.client.indices.CreateIndexResponse;
+import org.opensearch.client.indices.GetIndexRequest;
+import org.opensearch.client.indices.GetIndexResponse;
+import org.opensearch.client.indices.GetMappingsRequest;
+import org.opensearch.client.indices.GetMappingsResponse;
+import org.opensearch.client.indices.PutIndexTemplateRequest;
+import org.opensearch.client.indices.PutMappingRequest;
+import org.opensearch.client.indices.ResizeRequest;
+import org.opensearch.client.indices.ResizeResponse;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.cluster.metadata.IndexTemplateMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.query.BoolQueryBuilder;
+import org.opensearch.index.query.MatchQueryBuilder;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.index.reindex.BulkByScrollResponse;
+import org.opensearch.index.reindex.ReindexRequest;
+import org.opensearch.repositories.RepositoryMissingException;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.test.framework.AuditCompliance;
+import org.opensearch.test.framework.AuditConfiguration;
+import org.opensearch.test.framework.AuditFilters;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.audit.AuditLogsRule;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasProperty;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.ADD;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.REMOVE;
+import static org.opensearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions.Type.REMOVE_INDEX;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.rest.RestRequest.Method.DELETE;
+import static org.opensearch.rest.RestRequest.Method.GET;
+import static org.opensearch.rest.RestRequest.Method.POST;
+import static org.opensearch.rest.RestRequest.Method.PUT;
+import static org.opensearch.core.rest.RestStatus.ACCEPTED;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.core.rest.RestStatus.INTERNAL_SERVER_ERROR;
+import static org.opensearch.security.Song.FIELD_ARTIST;
+import static org.opensearch.security.Song.FIELD_STARS;
+import static org.opensearch.security.Song.FIELD_TITLE;
+import static org.opensearch.security.Song.QUERY_TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.QUERY_TITLE_NEXT_SONG;
+import static org.opensearch.security.Song.QUERY_TITLE_POISON;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.TITLE_NEXT_SONG;
+import static org.opensearch.security.Song.TITLE_POISON;
+import static org.opensearch.security.Song.TITLE_SONG_1_PLUS_1;
+import static org.opensearch.security.auditlog.impl.AuditCategory.INDEX_EVENT;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.auditPredicate;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.grantedPrivilege;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.missingPrivilege;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.userAuthenticated;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.averageAggregationRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.getSearchScrollRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryStringQueryRequest;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.searchRequestWithScroll;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.statsAggregationRequest;
+import static org.opensearch.test.framework.matcher.BulkResponseMatchers.bulkResponseContainExceptions;
+import static org.opensearch.test.framework.matcher.BulkResponseMatchers.failureBulkResponse;
+import static org.opensearch.test.framework.matcher.BulkResponseMatchers.successBulkResponse;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.aliasExists;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainSuccessSnapshot;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainTemplate;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainTemplateWithAlias;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainsDocument;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainsDocumentWithFieldValue;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.clusterContainsSnapshotRepository;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexExists;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexMappingIsEqualTo;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexSettingsContainValues;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.indexStateIsEqualTo;
+import static org.opensearch.test.framework.matcher.ClusterMatchers.snapshotInClusterDoesNotExists;
+import static org.opensearch.test.framework.matcher.DeleteResponseMatchers.isSuccessfulDeleteResponse;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.containsExactlyIndices;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.containsFieldWithNameAndType;
+import static org.opensearch.test.framework.matcher.FieldCapabilitiesResponseMatchers.numberOfFieldsIsEqualTo;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.containDocument;
+import static org.opensearch.test.framework.matcher.GetResponseMatchers.documentContainField;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.getIndexResponseContainsIndices;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.getMappingsResponseContainsIndices;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.getSettingsResponseContainsIndices;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.isSuccessfulClearIndicesCacheResponse;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.isSuccessfulCloseIndexResponse;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.isSuccessfulCreateIndexResponse;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.isSuccessfulOpenIndexResponse;
+import static org.opensearch.test.framework.matcher.IndexResponseMatchers.isSuccessfulResizeResponse;
+import static org.opensearch.test.framework.matcher.MultiGetResponseMatchers.isSuccessfulMultiGetResponse;
+import static org.opensearch.test.framework.matcher.MultiGetResponseMatchers.numberOfGetItemResponsesIsEqualTo;
+import static org.opensearch.test.framework.matcher.MultiSearchResponseMatchers.isSuccessfulMultiSearchResponse;
+import static org.opensearch.test.framework.matcher.MultiSearchResponseMatchers.numberOfSearchItemResponsesIsEqualTo;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.errorMessageContain;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containAggregationWithNameAndType;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.containNotEmptyScrollingId;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfHitsInPageIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitContainsFieldWithValue;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+import static org.opensearch.test.framework.matcher.UpdateResponseMatchers.isSuccessfulUpdateResponse;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class SearchOperationTest {
+
+    private static final Logger log = LogManager.getLogger(SearchOperationTest.class);
+
+    public static final String SONG_INDEX_NAME = "song_lyrics";
+    public static final String PROHIBITED_SONG_INDEX_NAME = "prohibited_song_lyrics";
+    public static final String WRITE_SONG_INDEX_NAME = "write_song_index";
+
+    public static final String SONG_LYRICS_ALIAS = "song_lyrics_index_alias";
+    public static final String PROHIBITED_SONG_ALIAS = "prohibited_song_lyrics_index_alias";
+    private static final String COLLECTIVE_INDEX_ALIAS = "collective-index-alias";
+    private static final String TEMPLATE_INDEX_PREFIX = "song-transcription*";
+    public static final String TEMPORARY_ALIAS_NAME = "temporary-alias";
+    public static final String ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001 = "alias-used-in-musical-index-template-0001";
+    public static final String ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002 = "alias-used-in-musical-index-template-0002";
+    public static final String ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0003 = "alias-used-in-musical-index-template-0003";
+    public static final String INDEX_NAME_SONG_TRANSCRIPTION_JAZZ = "song-transcription-jazz";
+
+    public static final String MUSICAL_INDEX_TEMPLATE = "musical-index-template";
+    public static final String ALIAS_CREATE_INDEX_WITH_ALIAS_POSITIVE = "alias_create_index_with_alias_positive";
+    public static final String ALIAS_CREATE_INDEX_WITH_ALIAS_NEGATIVE = "alias_create_index_with_alias_negative";
+
+    public static final String UNDELETABLE_TEMPLATE_NAME = "undeletable-template-name";
+
+    public static final String ALIAS_FROM_UNDELETABLE_TEMPLATE = "alias-from-undeletable-template";
+
+    public static final String TEST_SNAPSHOT_REPOSITORY_NAME = "test-snapshot-repository";
+
+    public static final String UNUSED_SNAPSHOT_REPOSITORY_NAME = "unused-snapshot-repository";
+
+    public static final String RESTORED_SONG_INDEX_NAME = "restored_" + WRITE_SONG_INDEX_NAME;
+
+    public static final String UPDATE_DELETE_OPERATION_INDEX_NAME = "update_delete_index";
+
+    public static final String DOCUMENT_TO_UPDATE_ID = "doc_to_update";
+
+    private static final String ID_P4 = "4";
+    private static final String ID_S3 = "3";
+    private static final String ID_S2 = "2";
+    private static final String ID_S1 = "1";
+
+    static final User ADMIN_USER = new User("admin").roles(ALL_ACCESS);
+
+    /**
+    * All user read permissions are related to {@link #SONG_INDEX_NAME} index
+    */
+    static final User LIMITED_READ_USER = new User("limited_read_user").roles(
+        new Role("limited-song-reader").clusterPermissions(
+            "indices:data/read/mget",
+            "indices:data/read/msearch",
+            "indices:data/read/scroll"
+        )
+            .indexPermissions(
+                "indices:data/read/search",
+                "indices:data/read/get",
+                "indices:data/read/mget*",
+                "indices:admin/aliases",
+                "indices:data/read/field_caps",
+                "indices:data/read/field_caps*"
+            )
+            .on(SONG_INDEX_NAME)
+    );
+
+    static final User LIMITED_WRITE_USER = new User("limited_write_user").roles(
+        new Role("limited-write-role").clusterPermissions(
+            "indices:data/write/bulk",
+            "indices:admin/template/put",
+            "indices:admin/template/delete",
+            "cluster:admin/repository/put",
+            "cluster:admin/repository/delete",
+            "cluster:admin/snapshot/create",
+            "cluster:admin/snapshot/status",
+            "cluster:admin/snapshot/status[nodes]",
+            "cluster:admin/snapshot/delete",
+            "cluster:admin/snapshot/get",
+            "cluster:admin/snapshot/restore"
+        )
+            .indexPermissions(
+                "indices:data/write/index",
+                "indices:data/write/bulk[s]",
+                "indices:admin/create",
+                "indices:admin/mapping/put",
+                "indices:data/write/update",
+                "indices:data/write/bulk[s]",
+                "indices:data/write/delete",
+                "indices:data/write/bulk[s]"
+            )
+            .on(WRITE_SONG_INDEX_NAME),
+        new Role("transcription-role").indexPermissions(
+            "indices:data/write/index",
+            "indices:admin/create",
+            "indices:data/write/bulk[s]",
+            "indices:admin/mapping/put"
+        ).on(INDEX_NAME_SONG_TRANSCRIPTION_JAZZ),
+        new Role("limited-write-index-restore-role").indexPermissions(
+            "indices:data/write/index",
+            "indices:admin/create",
+            "indices:data/read/search"
+        ).on(RESTORED_SONG_INDEX_NAME)
+    );
+
+    /**
+    * User who is allowed read both index {@link #SONG_INDEX_NAME} and {@link #PROHIBITED_SONG_INDEX_NAME}
+    */
+    static final User DOUBLE_READER_USER = new User("double_read_user").roles(
+        new Role("full-song-reader").indexPermissions("indices:data/read/search").on(SONG_INDEX_NAME, PROHIBITED_SONG_INDEX_NAME)
+    );
+
+    static final User REINDEXING_USER = new User("reindexing_user").roles(
+        new Role("song-reindexing-target-write").clusterPermissions("indices:data/write/reindex", "indices:data/write/bulk")
+            .indexPermissions("indices:admin/create", "indices:data/write/index", "indices:data/write/bulk[s]", "indices:admin/mapping/put")
+            .on(WRITE_SONG_INDEX_NAME),
+        new Role("song-reindexing-source-read").clusterPermissions("indices:data/read/scroll")
+            .indexPermissions("indices:data/read/search")
+            .on(SONG_INDEX_NAME)
+    );
+
+    private Client internalClient;
+    /**
+    * User who is allowed to update and delete documents on index {@link #UPDATE_DELETE_OPERATION_INDEX_NAME}
+    */
+    static final User UPDATE_DELETE_USER = new User("update_delete_user").roles(
+        new Role("document-updater").clusterPermissions("indices:data/write/bulk")
+            .indexPermissions(
+                "indices:data/write/update",
+                "indices:data/write/index",
+                "indices:data/write/bulk[s]",
+                "indices:admin/mapping/put"
+            )
+            .on(UPDATE_DELETE_OPERATION_INDEX_NAME),
+        new Role("document-remover").indexPermissions("indices:data/write/delete").on(UPDATE_DELETE_OPERATION_INDEX_NAME)
+    );
+
+    static final String INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX = "index_operations_";
+
+    /**
+    * User who is allowed to perform index-related operations on
+    * indices with names prefixed by the {@link #INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX}
+    */
+    static final User USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES = new User("index-operation-tester").roles(
+        new Role("index-manager").indexPermissions(
+            "indices:admin/create",
+            "indices:admin/get",
+            "indices:admin/delete",
+            "indices:admin/close",
+            "indices:admin/close*",
+            "indices:admin/open",
+            "indices:admin/resize",
+            "indices:monitor/stats",
+            "indices:monitor/settings/get",
+            "indices:admin/settings/update",
+            "indices:admin/mapping/put",
+            "indices:admin/mappings/get",
+            "indices:admin/cache/clear",
+            "indices:admin/aliases"
+        ).on(INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("*"))
+    );
+
+    private static User USER_ALLOWED_TO_CREATE_INDEX = new User("user-allowed-to-create-index").roles(
+        new Role("create-index-role").indexPermissions("indices:admin/create").on("*")
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(
+            ADMIN_USER,
+            LIMITED_READ_USER,
+            LIMITED_WRITE_USER,
+            DOUBLE_READER_USER,
+            REINDEXING_USER,
+            UPDATE_DELETE_USER,
+            USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES,
+            USER_ALLOWED_TO_CREATE_INDEX
+        )
+        .audit(
+            new AuditConfiguration(true).compliance(new AuditCompliance().enabled(true))
+                .filters(new AuditFilters().enabledRest(true).enabledTransport(true))
+        )
+        .build();
+
+    @Rule
+    public AuditLogsRule auditLogsRule = new AuditLogsRule();
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(SONG_INDEX_NAME).setId(ID_S1).setRefreshPolicy(IMMEDIATE).setSource(SONGS[0].asMap()).get();
+            client.prepareIndex(UPDATE_DELETE_OPERATION_INDEX_NAME)
+                .setId(DOCUMENT_TO_UPDATE_ID)
+                .setRefreshPolicy(IMMEDIATE)
+                .setSource("field", "value")
+                .get();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(new AliasActions(ADD).indices(SONG_INDEX_NAME).alias(SONG_LYRICS_ALIAS))
+                )
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(SONG_INDEX_NAME).id(ID_S2).source(SONGS[1].asMap()))
+                .actionGet();
+            client.index(new IndexRequest().setRefreshPolicy(IMMEDIATE).index(SONG_INDEX_NAME).id(ID_S3).source(SONGS[2].asMap()))
+                .actionGet();
+
+            client.prepareIndex(PROHIBITED_SONG_INDEX_NAME).setId(ID_P4).setSource(SONGS[3].asMap()).setRefreshPolicy(IMMEDIATE).get();
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new AliasActions(ADD).indices(PROHIBITED_SONG_INDEX_NAME).alias(PROHIBITED_SONG_ALIAS)
+                    )
+                )
+                .actionGet();
+
+            client.admin()
+                .indices()
+                .aliases(
+                    new IndicesAliasesRequest().addAliasAction(
+                        new AliasActions(ADD).indices(SONG_INDEX_NAME, PROHIBITED_SONG_INDEX_NAME).alias(COLLECTIVE_INDEX_ALIAS)
+                    )
+                )
+                .actionGet();
+            var createTemplateRequest = new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest(
+                UNDELETABLE_TEMPLATE_NAME
+            );
+            createTemplateRequest.patterns(List.of("pattern-does-not-match-to-any-index"));
+            createTemplateRequest.alias(new Alias(ALIAS_FROM_UNDELETABLE_TEMPLATE));
+            client.admin().indices().putTemplate(createTemplateRequest).actionGet();
+
+            client.admin()
+                .cluster()
+                .putRepository(
+                    new PutRepositoryRequest(UNUSED_SNAPSHOT_REPOSITORY_NAME).type("fs")
+                        .settings(Map.of("location", cluster.getSnapshotDirPath()))
+                )
+                .actionGet();
+        }
+    }
+
+    @Before
+    public void retrieveClusterClient() {
+        this.internalClient = cluster.getInternalNodeClient();
+    }
+
+    @After
+    public void cleanData() throws ExecutionException, InterruptedException {
+        Stopwatch stopwatch = Stopwatch.createStarted();
+        IndicesAdminClient indices = internalClient.admin().indices();
+        List<String> indicesToBeDeleted = List.of(
+            WRITE_SONG_INDEX_NAME,
+            INDEX_NAME_SONG_TRANSCRIPTION_JAZZ,
+            RESTORED_SONG_INDEX_NAME,
+            INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("*")
+        );
+        for (String indexToBeDeleted : indicesToBeDeleted) {
+            IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest(indexToBeDeleted);
+            var indicesExistsResponse = indices.exists(indicesExistsRequest).get();
+            if (indicesExistsResponse.isExists()) {
+                DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexToBeDeleted);
+                indices.delete(deleteIndexRequest).actionGet();
+                Awaitility.await().ignoreExceptions().until(() -> indices.exists(indicesExistsRequest).get().isExists() == false);
+            }
+        }
+
+        List<String> aliasesToBeDeleted = List.of(
+            TEMPORARY_ALIAS_NAME,
+            ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001,
+            ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002,
+            ALIAS_CREATE_INDEX_WITH_ALIAS_POSITIVE,
+            ALIAS_CREATE_INDEX_WITH_ALIAS_NEGATIVE
+        );
+        for (String aliasToBeDeleted : aliasesToBeDeleted) {
+            if (indices.exists(new IndicesExistsRequest(aliasToBeDeleted)).get().isExists()) {
+                AliasActions aliasAction = new AliasActions(AliasActions.Type.REMOVE).indices(SONG_INDEX_NAME).alias(aliasToBeDeleted);
+                internalClient.admin().indices().aliases(new IndicesAliasesRequest().addAliasAction(aliasAction)).get();
+            }
+        }
+
+        GetIndexTemplatesResponse response = indices.getTemplates(new GetIndexTemplatesRequest(MUSICAL_INDEX_TEMPLATE)).get();
+        for (IndexTemplateMetadata metadata : response.getIndexTemplates()) {
+            indices.deleteTemplate(new DeleteIndexTemplateRequest(metadata.getName())).get();
+        }
+
+        ClusterAdminClient clusterClient = internalClient.admin().cluster();
+        try {
+            clusterClient.deleteRepository(new DeleteRepositoryRequest(TEST_SNAPSHOT_REPOSITORY_NAME)).actionGet();
+        } catch (RepositoryMissingException e) {
+            log.debug("Repository '{}' does not exist. This is expected in most of test cases", TEST_SNAPSHOT_REPOSITORY_NAME, e);
+        }
+        internalClient.close();
+        log.debug("Cleaning data after test took {}", stopwatch.stop());
+    }
+
+    @Test
+    public void shouldSearchForDocuments_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldSearchForDocuments_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(PROHIBITED_SONG_INDEX_NAME, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/prohibited_song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(SONG_LYRICS_ALIAS, QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics_index_alias/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldSearchForDocumentsViaAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(PROHIBITED_SONG_ALIAS, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/prohibited_song_lyrics_index_alias/_search")
+        );
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchSongViaMultiIndexAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(COLLECTIVE_INDEX_ALIAS, QUERY_TITLE_NEXT_SONG);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S3));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_NEXT_SONG));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/collective-index-alias/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(DOUBLE_READER_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchSongViaMultiIndexAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(COLLECTIVE_INDEX_ALIAS, QUERY_TITLE_POISON);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/collective-index-alias/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchAllIndexes_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(ADMIN_USER).withRestRequest(POST, "/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(ADMIN_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchAllIndexes_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest(QUERY_TITLE_MAGNUM_OPUS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchSongIndexesWithAsterisk_prohibitedSongIndex_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("*" + SONG_INDEX_NAME, QUERY_TITLE_POISON);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, PROHIBITED_SONG_INDEX_NAME, ID_P4));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_POISON));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/*song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(DOUBLE_READER_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchSongIndexesWithAsterisk_singIndex_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("*" + SONG_INDEX_NAME, QUERY_TITLE_NEXT_SONG);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S3));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_NEXT_SONG));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/*song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(DOUBLE_READER_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAbleToSearchSongIndexesWithAsterisk_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("*" + SONG_INDEX_NAME, QUERY_TITLE_NEXT_SONG);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/*song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldFindSongUsingDslQuery_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = new SearchRequest(SONG_INDEX_NAME);
+            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+            BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
+            boolQueryBuilder.filter(QueryBuilders.regexpQuery(FIELD_ARTIST, "f.+"));
+            boolQueryBuilder.filter(new MatchQueryBuilder(FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            searchSourceBuilder.query(boolQueryBuilder);
+            searchRequest.source(searchSourceBuilder);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldFindSongUsingDslQuery_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = new SearchRequest(PROHIBITED_SONG_INDEX_NAME);
+            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+            BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
+            boolQueryBuilder.filter(QueryBuilders.regexpQuery(FIELD_ARTIST, "n.+"));
+            boolQueryBuilder.filter(new MatchQueryBuilder(FIELD_TITLE, TITLE_POISON));
+            searchSourceBuilder.query(boolQueryBuilder);
+            searchRequest.source(searchSourceBuilder);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/prohibited_song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldPerformSearchWithAllIndexAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("_all", QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(searchResponse, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(ADMIN_USER).withRestRequest(POST, "/_all/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(ADMIN_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldPerformSearchWithAllIndexAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = queryStringQueryRequest("_all", QUERY_TITLE_MAGNUM_OPUS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_all/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldScrollOverSearchResults_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = searchRequestWithScroll(SONG_INDEX_NAME, 2);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containNotEmptyScrollingId());
+
+            SearchScrollRequest scrollRequest = getSearchScrollRequest(searchResponse);
+
+            SearchResponse scrollResponse = restHighLevelClient.scroll(scrollRequest, DEFAULT);
+            assertThat(scrollResponse, isSuccessfulSearchResponse());
+            assertThat(scrollResponse, containNotEmptyScrollingId());
+            assertThat(scrollResponse, numberOfTotalHitsIsEqualTo(3));
+            assertThat(scrollResponse, numberOfHitsInPageIsEqualTo(1));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_search/scroll"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchScrollRequest"));
+    }
+
+    @Test
+    public void shouldScrollOverSearchResults_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            SearchRequest searchRequest = searchRequestWithScroll(SONG_INDEX_NAME, 2);
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containNotEmptyScrollingId());
+
+            SearchScrollRequest scrollRequest = getSearchScrollRequest(searchResponse);
+
+            assertThatThrownBy(() -> restHighLevelClient.scroll(scrollRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(DOUBLE_READER_USER, "SearchRequest"));
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/_search/scroll"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(DOUBLE_READER_USER, "SearchScrollRequest"));
+    }
+
+    @Test
+    public void shouldGetDocument_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            GetResponse response = restHighLevelClient.get(new GetRequest(SONG_INDEX_NAME, ID_S1), DEFAULT);
+
+            assertThat(response, containDocument(SONG_INDEX_NAME, ID_S1));
+            assertThat(response, documentContainField(FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(GET, "/song_lyrics/_doc/1"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "GetRequest"));
+    }
+
+    @Test
+    public void shouldGetDocument_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            GetRequest getRequest = new GetRequest(PROHIBITED_SONG_INDEX_NAME, ID_P4);
+            assertThatThrownBy(() -> restHighLevelClient.get(getRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(GET, "/prohibited_song_lyrics/_doc/4"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "GetRequest"));
+    }
+
+    @Test
+    public void shouldPerformMultiGetDocuments_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            MultiGetRequest request = new MultiGetRequest();
+            request.add(new Item(SONG_INDEX_NAME, ID_S1));
+            request.add(new Item(SONG_INDEX_NAME, ID_S2));
+
+            MultiGetResponse response = restHighLevelClient.mget(request, DEFAULT);
+
+            assertThat(response, is(notNullValue()));
+            assertThat(response, isSuccessfulMultiGetResponse());
+            assertThat(response, numberOfGetItemResponsesIsEqualTo(2));
+
+            MultiGetItemResponse[] responses = response.getResponses();
+            assertThat(
+                responses[0].getResponse(),
+                allOf(containDocument(SONG_INDEX_NAME, ID_S1), documentContainField(FIELD_TITLE, TITLE_MAGNUM_OPUS))
+            );
+            assertThat(
+                responses[1].getResponse(),
+                allOf(containDocument(SONG_INDEX_NAME, ID_S2), documentContainField(FIELD_TITLE, TITLE_SONG_1_PLUS_1))
+            );
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_mget"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiGetRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiGetShardRequest"));
+    }
+
+    @Test
+    public void shouldPerformMultiGetDocuments_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            MultiGetRequest request = new MultiGetRequest();
+            request.add(new Item(SONG_INDEX_NAME, ID_S1));
+
+            assertThatThrownBy(() -> restHighLevelClient.mget(request, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/_mget"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(DOUBLE_READER_USER, "MultiGetRequest"));
+    }
+
+    @Test
+    public void shouldPerformMultiGetDocuments_partiallyPositive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            MultiGetRequest request = new MultiGetRequest();
+            request.add(new Item(SONG_INDEX_NAME, ID_S1));
+            request.add(new Item(PROHIBITED_SONG_INDEX_NAME, ID_P4));
+
+            MultiGetResponse response = restHighLevelClient.mget(request, DEFAULT);
+
+            assertThat(request, notNullValue());
+            assertThat(response, not(isSuccessfulMultiGetResponse()));
+            assertThat(response, numberOfGetItemResponsesIsEqualTo(2));
+
+            MultiGetItemResponse[] responses = response.getResponses();
+            assertThat(responses, arrayContaining(hasProperty("failure", nullValue()), hasProperty("failure", notNullValue())));
+            assertThat(responses[1].getFailure().getFailure(), statusException(INTERNAL_SERVER_ERROR));
+            assertThat(responses[1].getFailure().getFailure(), errorMessageContain("security_exception"));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_mget"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiGetRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiGetShardRequest").withIndex(SONG_INDEX_NAME));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "MultiGetShardRequest").withIndex(PROHIBITED_SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldBeAllowedToPerformMulitSearch_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            MultiSearchRequest request = new MultiSearchRequest();
+            request.add(queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS));
+            request.add(queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_NEXT_SONG));
+
+            MultiSearchResponse response = restHighLevelClient.msearch(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response, isSuccessfulMultiSearchResponse());
+            assertThat(response, numberOfSearchItemResponsesIsEqualTo(2));
+
+            MultiSearchResponse.Item[] responses = response.getResponses();
+
+            assertThat(responses[0].getResponse(), searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(responses[0].getResponse(), searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S1));
+            assertThat(responses[1].getResponse(), searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_NEXT_SONG));
+            assertThat(responses[1].getResponse(), searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, ID_S3));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_msearch"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiSearchRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldBeAllowedToPerformMulitSearch_partiallyPositive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            MultiSearchRequest request = new MultiSearchRequest();
+            request.add(queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS));
+            request.add(queryStringQueryRequest(PROHIBITED_SONG_INDEX_NAME, QUERY_TITLE_POISON));
+
+            MultiSearchResponse response = restHighLevelClient.msearch(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response, not(isSuccessfulMultiSearchResponse()));
+            assertThat(response, numberOfSearchItemResponsesIsEqualTo(2));
+
+            MultiSearchResponse.Item[] responses = response.getResponses();
+            assertThat(responses[0].getFailure(), nullValue());
+            assertThat(responses[1].getFailure(), statusException(INTERNAL_SERVER_ERROR));
+            assertThat(responses[1].getFailure(), errorMessageContain("security_exception"));
+            assertThat(responses[1].getResponse(), nullValue());
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_msearch"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "MultiSearchRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest").withIndex(SONG_INDEX_NAME));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest").withIndex(PROHIBITED_SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldBeAllowedToPerformMulitSearch_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(DOUBLE_READER_USER)) {
+            MultiSearchRequest request = new MultiSearchRequest();
+            request.add(queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS));
+            request.add(queryStringQueryRequest(SONG_INDEX_NAME, QUERY_TITLE_NEXT_SONG));
+
+            assertThatThrownBy(() -> restHighLevelClient.msearch(request, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(DOUBLE_READER_USER).withRestRequest(POST, "/_msearch"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(DOUBLE_READER_USER, "MultiSearchRequest"));
+    }
+
+    @Test
+    public void shouldAggregateDataAndComputeAverage_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            final String aggregationName = "averageStars";
+            SearchRequest searchRequest = averageAggregationRequest(SONG_INDEX_NAME, aggregationName, FIELD_STARS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "avg"));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest").withIndex(SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldAggregateDataAndComputeAverage_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = averageAggregationRequest(PROHIBITED_SONG_INDEX_NAME, "averageStars", FIELD_STARS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/prohibited_song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest").withIndex(PROHIBITED_SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldPerformStatAggregation_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            final String aggregationName = "statsStars";
+            SearchRequest searchRequest = statsAggregationRequest(SONG_INDEX_NAME, aggregationName, FIELD_STARS);
+
+            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, containAggregationWithNameAndType(aggregationName, "stats"));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldPerformStatAggregation_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SearchRequest searchRequest = statsAggregationRequest(PROHIBITED_SONG_INDEX_NAME, "statsStars", FIELD_STARS);
+
+            assertThatThrownBy(() -> restHighLevelClient.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/prohibited_song_lyrics/_search"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldIndexDocumentInBulkRequest_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(response, successBulkResponse());
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, "one"));
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, "two"));
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "one", FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "two", FIELD_TITLE, TITLE_SONG_1_PLUS_1)
+            );
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertAtLeast(4, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));// sometimes 4 or 6
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));// sometimes 2 or 4
+    }
+
+    @Test
+    public void shouldIndexDocumentInBulkRequest_partiallyPositive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(
+                response,
+                bulkResponseContainExceptions(0, allOf(statusException(INTERNAL_SERVER_ERROR), errorMessageContain("security_exception")))
+            );
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, "two"));
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "two", FIELD_TITLE, TITLE_SONG_1_PLUS_1)
+            );
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest").withIndex(SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldIndexDocumentInBulkRequest_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(
+                response,
+                allOf(
+                    failureBulkResponse(),
+                    bulkResponseContainExceptions(statusException(INTERNAL_SERVER_ERROR)),
+                    bulkResponseContainExceptions(errorMessageContain("security_exception"))
+                )
+            );
+            assertThat(internalClient, not(clusterContainsDocument(SONG_INDEX_NAME, "one")));
+            assertThat(internalClient, not(clusterContainsDocument(SONG_INDEX_NAME, "two")));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest").withIndex(SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldUpdateDocumentsInBulk_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            final String titleOne = "shape of my mind";
+            final String titleTwo = "forgiven";
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+            bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new UpdateRequest(WRITE_SONG_INDEX_NAME, "one").doc(Map.of(FIELD_TITLE, titleOne)));
+            bulkRequest.add(new UpdateRequest(WRITE_SONG_INDEX_NAME, "two").doc(Map.of(FIELD_TITLE, titleTwo)));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(response, successBulkResponse());
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "one", FIELD_TITLE, titleOne));
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "two", FIELD_TITLE, titleTwo));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+
+    }
+
+    @Test
+    public void shouldUpdateDocumentsInBulk_partiallyPositive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            final String titleOne = "shape of my mind";
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+            bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new UpdateRequest(WRITE_SONG_INDEX_NAME, "one").doc(Map.of(FIELD_TITLE, titleOne)));
+            bulkRequest.add(new UpdateRequest(SONG_INDEX_NAME, ID_S2).doc(Map.of(FIELD_TITLE, "forgiven")));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(
+                response,
+                bulkResponseContainExceptions(1, allOf(statusException(INTERNAL_SERVER_ERROR), errorMessageContain("security_exception")))
+            );
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "one", FIELD_TITLE, titleOne));
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(SONG_INDEX_NAME, ID_S2, FIELD_TITLE, TITLE_SONG_1_PLUS_1));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest").withIndex(SONG_INDEX_NAME));
+    }
+
+    @Test
+    public void shouldUpdateDocumentsInBulk_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new UpdateRequest(SONG_INDEX_NAME, ID_S1).doc(Map.of(FIELD_TITLE, "shape of my mind")));
+            bulkRequest.add(new UpdateRequest(SONG_INDEX_NAME, ID_S2).doc(Map.of(FIELD_TITLE, "forgiven")));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(
+                response,
+                allOf(
+                    failureBulkResponse(),
+                    bulkResponseContainExceptions(statusException(INTERNAL_SERVER_ERROR)),
+                    bulkResponseContainExceptions(errorMessageContain("security_exception"))
+                )
+            );
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(SONG_INDEX_NAME, ID_S1, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(SONG_INDEX_NAME, ID_S2, FIELD_TITLE, TITLE_SONG_1_PLUS_1));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest"));
+    }
+
+    @Test
+    public void shouldDeleteDocumentInBulk_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("three").source(SONGS[2].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("four").source(SONGS[3].asMap()));
+            assertThat(restHighLevelClient.bulk(bulkRequest, DEFAULT), successBulkResponse());
+            bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new DeleteRequest(WRITE_SONG_INDEX_NAME, "one"));
+            bulkRequest.add(new DeleteRequest(WRITE_SONG_INDEX_NAME, "three"));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(response, successBulkResponse());
+            assertThat(internalClient, not(clusterContainsDocument(WRITE_SONG_INDEX_NAME, "one")));
+            assertThat(internalClient, not(clusterContainsDocument(WRITE_SONG_INDEX_NAME, "three")));
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "two", FIELD_TITLE, TITLE_SONG_1_PLUS_1)
+            );
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "four", FIELD_TITLE, TITLE_POISON));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    public void shouldDeleteDocumentInBulk_partiallyPositive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("one").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("two").source(SONGS[1].asMap()));
+            assertThat(restHighLevelClient.bulk(bulkRequest, DEFAULT), successBulkResponse());
+            bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new DeleteRequest(WRITE_SONG_INDEX_NAME, "one"));
+            bulkRequest.add(new DeleteRequest(SONG_INDEX_NAME, ID_S3));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+            assertThat(internalClient, not(clusterContainsDocument(WRITE_SONG_INDEX_NAME, "one")));
+
+            assertThat(
+                response,
+                bulkResponseContainExceptions(1, allOf(statusException(INTERNAL_SERVER_ERROR), errorMessageContain("security_exception")))
+            );
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(WRITE_SONG_INDEX_NAME, "two", FIELD_TITLE, TITLE_SONG_1_PLUS_1)
+            );
+            assertThat(internalClient, clusterContainsDocumentWithFieldValue(SONG_INDEX_NAME, ID_S3, FIELD_TITLE, TITLE_NEXT_SONG));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest"));
+    }
+
+    @Test
+    public void shouldDeleteDocumentInBulk_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            BulkRequest bulkRequest = new BulkRequest().setRefreshPolicy(IMMEDIATE);
+            bulkRequest.add(new DeleteRequest(SONG_INDEX_NAME, ID_S1));
+            bulkRequest.add(new DeleteRequest(SONG_INDEX_NAME, ID_S3));
+
+            BulkResponse response = restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            assertThat(
+                response,
+                allOf(
+                    failureBulkResponse(),
+                    bulkResponseContainExceptions(statusException(INTERNAL_SERVER_ERROR)),
+                    bulkResponseContainExceptions(errorMessageContain("security_exception"))
+                )
+            );
+            assertThat(internalClient, clusterContainsDocument(SONG_INDEX_NAME, ID_S1));
+            assertThat(internalClient, clusterContainsDocument(SONG_INDEX_NAME, ID_S3));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "BulkShardRequest"));
+
+    }
+
+    @Test
+    public void shouldReindexDocuments_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(REINDEXING_USER)) {
+            ReindexRequest reindexRequest = new ReindexRequest().setSourceIndices(SONG_INDEX_NAME).setDestIndex(WRITE_SONG_INDEX_NAME);
+
+            BulkByScrollResponse response = restHighLevelClient.reindex(reindexRequest, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.getBulkFailures(), empty());
+            assertThat(response.getSearchFailures(), empty());
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, ID_S1));
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, ID_S2));
+            assertThat(internalClient, clusterContainsDocument(WRITE_SONG_INDEX_NAME, ID_S3));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(REINDEXING_USER).withRestRequest(POST, "/_reindex"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "ReindexRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "SearchRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(REINDEXING_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(REINDEXING_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "SearchScrollRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(REINDEXING_USER));
+        auditLogsRule.assertExactlyOne(missingPrivilege(REINDEXING_USER, "ClearScrollRequest"));
+    }
+
+    @Test
+    public void shouldReindexDocuments_negativeSource() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(REINDEXING_USER)) {
+            ReindexRequest reindexRequest = new ReindexRequest().setSourceIndices(PROHIBITED_SONG_INDEX_NAME)
+                .setDestIndex(WRITE_SONG_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.reindex(reindexRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(internalClient, not(clusterContainsDocument(WRITE_SONG_INDEX_NAME, ID_P4)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(REINDEXING_USER).withRestRequest(POST, "/_reindex"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "ReindexRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(REINDEXING_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldReindexDocuments_negativeDestination() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(REINDEXING_USER)) {
+            ReindexRequest reindexRequest = new ReindexRequest().setSourceIndices(SONG_INDEX_NAME).setDestIndex(PROHIBITED_SONG_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.reindex(reindexRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(internalClient, not(clusterContainsDocument(PROHIBITED_SONG_INDEX_NAME, ID_S1)));
+            assertThat(internalClient, not(clusterContainsDocument(PROHIBITED_SONG_INDEX_NAME, ID_S2)));
+            assertThat(internalClient, not(clusterContainsDocument(PROHIBITED_SONG_INDEX_NAME, ID_S3)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(REINDEXING_USER).withRestRequest(POST, "/_reindex"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "ReindexRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "SearchRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "BulkRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(REINDEXING_USER, "BulkShardRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(REINDEXING_USER, "ClearScrollRequest"));
+    }
+
+    @Test
+    public void shouldReindexDocuments_negativeSourceAndDestination() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(REINDEXING_USER)) {
+            ReindexRequest reindexRequest = new ReindexRequest().setSourceIndices(PROHIBITED_SONG_INDEX_NAME).setDestIndex(SONG_INDEX_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.reindex(reindexRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(REINDEXING_USER).withRestRequest(POST, "/_reindex"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(REINDEXING_USER, "ReindexRequest"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(REINDEXING_USER, "SearchRequest"));
+    }
+
+    @Test
+    public void shouldUpdateDocument_positive() throws IOException {
+        String newField = "newField";
+        String newValue = "newValue";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(UPDATE_DELETE_USER)) {
+            UpdateRequest updateRequest = new UpdateRequest(UPDATE_DELETE_OPERATION_INDEX_NAME, DOCUMENT_TO_UPDATE_ID).doc(
+                newField,
+                newValue
+            ).setRefreshPolicy(IMMEDIATE);
+
+            UpdateResponse response = restHighLevelClient.update(updateRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulUpdateResponse());
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(UPDATE_DELETE_OPERATION_INDEX_NAME, DOCUMENT_TO_UPDATE_ID, newField, newValue)
+            );
+        }
+    }
+
+    @Test
+    public void shouldUpdateDocument_negative() throws IOException {
+        String newField = "newField";
+        String newValue = "newValue";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(UPDATE_DELETE_USER)) {
+            UpdateRequest updateRequest = new UpdateRequest(PROHIBITED_SONG_INDEX_NAME, DOCUMENT_TO_UPDATE_ID).doc(newField, newValue)
+                .setRefreshPolicy(IMMEDIATE);
+
+            assertThatThrownBy(() -> restHighLevelClient.update(updateRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldDeleteDocument_positive() throws IOException {
+        String docId = "shouldDeleteDocument_positive";
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.index(
+                new IndexRequest(UPDATE_DELETE_OPERATION_INDEX_NAME).id(docId).source("field", "value").setRefreshPolicy(IMMEDIATE)
+            ).actionGet();
+            assertThat(internalClient, clusterContainsDocument(UPDATE_DELETE_OPERATION_INDEX_NAME, docId));
+        }
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(UPDATE_DELETE_USER)) {
+            DeleteRequest deleteRequest = new DeleteRequest(UPDATE_DELETE_OPERATION_INDEX_NAME, docId).setRefreshPolicy(IMMEDIATE);
+
+            DeleteResponse response = restHighLevelClient.delete(deleteRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulDeleteResponse());
+            assertThat(internalClient, not(clusterContainsDocument(UPDATE_DELETE_OPERATION_INDEX_NAME, docId)));
+        }
+    }
+
+    @Test
+    public void shouldDeleteDocument_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(UPDATE_DELETE_USER)) {
+            DeleteRequest deleteRequest = new DeleteRequest(PROHIBITED_SONG_INDEX_NAME, ID_S1).setRefreshPolicy(IMMEDIATE);
+
+            assertThatThrownBy(() -> restHighLevelClient.delete(deleteRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldCreateAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            AliasActions aliasAction = new AliasActions(ADD).indices(SONG_INDEX_NAME).alias(TEMPORARY_ALIAS_NAME);
+            IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest().addAliasAction(aliasAction);
+
+            var response = restHighLevelClient.indices().updateAliases(indicesAliasesRequest, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, clusterContainsDocument(TEMPORARY_ALIAS_NAME, ID_S1));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_aliases"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_READ_USER, "IndicesAliasesRequest"));
+        auditLogsRule.assertExactly(2, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_READ_USER));
+    }
+
+    @Test
+    public void shouldCreateAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            AliasActions aliasAction = new AliasActions(ADD).indices(PROHIBITED_SONG_INDEX_NAME).alias(TEMPORARY_ALIAS_NAME);
+            IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest().addAliasAction(aliasAction);
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().updateAliases(indicesAliasesRequest, DEFAULT),
+                statusException(FORBIDDEN)
+            );
+
+            assertThat(internalClient, not(clusterContainsDocument(TEMPORARY_ALIAS_NAME, ID_P4)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_aliases"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "IndicesAliasesRequest"));
+    }
+
+    @Test
+    public void shouldDeleteAlias_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            AliasActions aliasAction = new AliasActions(ADD).indices(SONG_INDEX_NAME).alias(TEMPORARY_ALIAS_NAME);
+            IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest().addAliasAction(aliasAction);
+            restHighLevelClient.indices().updateAliases(indicesAliasesRequest, DEFAULT);
+            aliasAction = new AliasActions(REMOVE).indices(SONG_INDEX_NAME).alias(TEMPORARY_ALIAS_NAME);
+            indicesAliasesRequest = new IndicesAliasesRequest().addAliasAction(aliasAction);
+
+            var response = restHighLevelClient.indices().updateAliases(indicesAliasesRequest, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, not(clusterContainsDocument(TEMPORARY_ALIAS_NAME, ID_S1)));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_aliases"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_READ_USER, "IndicesAliasesRequest"));
+        auditLogsRule.assertExactly(4, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_READ_USER));
+    }
+
+    @Test
+    public void shouldDeleteAlias_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            AliasActions aliasAction = new AliasActions(REMOVE).indices(PROHIBITED_SONG_INDEX_NAME).alias(PROHIBITED_SONG_ALIAS);
+            IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest().addAliasAction(aliasAction);
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().updateAliases(indicesAliasesRequest, DEFAULT),
+                statusException(FORBIDDEN)
+            );
+
+            assertThat(internalClient, clusterContainsDocument(PROHIBITED_SONG_INDEX_NAME, ID_P4));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(POST, "/_aliases"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "IndicesAliasesRequest"));
+    }
+
+    @Test
+    public void shouldCreateIndexTemplate_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            PutIndexTemplateRequest request = new PutIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE).patterns(List.of(TEMPLATE_INDEX_PREFIX))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002));
+
+            var response = restHighLevelClient.indices().putTemplate(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, clusterContainTemplate(MUSICAL_INDEX_TEMPLATE));
+            String documentId = "0001";
+            IndexRequest indexRequest = new IndexRequest(INDEX_NAME_SONG_TRANSCRIPTION_JAZZ).id(documentId)
+                .source(SONGS[0].asMap())
+                .setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.index(indexRequest, DEFAULT);
+            assertThat(internalClient, clusterContainsDocument(INDEX_NAME_SONG_TRANSCRIPTION_JAZZ, documentId));
+            assertThat(internalClient, clusterContainsDocument(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001, documentId));
+            assertThat(internalClient, clusterContainsDocument(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002, documentId));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_template/musical-index-template"));
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/song-transcription-jazz/_doc/0001"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutIndexTemplateRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "IndexRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(8, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    public void shouldCreateIndexTemplate_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            PutIndexTemplateRequest request = new PutIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE).patterns(List.of(TEMPLATE_INDEX_PREFIX))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002));
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().putTemplate(request, DEFAULT), statusException(FORBIDDEN));
+            assertThat(internalClient, not(clusterContainTemplate(MUSICAL_INDEX_TEMPLATE)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(PUT, "/_template/musical-index-template"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "PutIndexTemplateRequest"));
+    }
+
+    @Test
+    public void shouldDeleteTemplate_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            PutIndexTemplateRequest request = new PutIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE).patterns(List.of(TEMPLATE_INDEX_PREFIX));
+            restHighLevelClient.indices().putTemplate(request, DEFAULT);
+            assertThat(internalClient, clusterContainTemplate(MUSICAL_INDEX_TEMPLATE));
+            DeleteIndexTemplateRequest deleteRequest = new DeleteIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE);
+
+            var response = restHighLevelClient.indices().deleteTemplate(deleteRequest, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, not(clusterContainTemplate(MUSICAL_INDEX_TEMPLATE)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_template/musical-index-template"));
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(DELETE, "/_template/musical-index-template"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutIndexTemplateRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "DeleteIndexTemplateRequest"));
+        auditLogsRule.assertExactly(4, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    public void shouldDeleteTemplate_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            DeleteIndexTemplateRequest deleteRequest = new DeleteIndexTemplateRequest(UNDELETABLE_TEMPLATE_NAME);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().deleteTemplate(deleteRequest, DEFAULT), statusException(FORBIDDEN));
+
+            assertThat(internalClient, clusterContainTemplate(UNDELETABLE_TEMPLATE_NAME));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(DELETE, "/_template/undeletable-template-name")
+        );
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "DeleteIndexTemplateRequest"));
+    }
+
+    @Test
+    public void shouldUpdateTemplate_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            PutIndexTemplateRequest request = new PutIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE).patterns(List.of(TEMPLATE_INDEX_PREFIX))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002));
+            restHighLevelClient.indices().putTemplate(request, DEFAULT);
+            assertThat(internalClient, clusterContainTemplate(MUSICAL_INDEX_TEMPLATE));
+            request = new PutIndexTemplateRequest(MUSICAL_INDEX_TEMPLATE).patterns(List.of(TEMPLATE_INDEX_PREFIX))
+                .alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0003));
+
+            var response = restHighLevelClient.indices().putTemplate(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            String documentId = "000one";
+            IndexRequest indexRequest = new IndexRequest(INDEX_NAME_SONG_TRANSCRIPTION_JAZZ).id(documentId)
+                .source(SONGS[0].asMap())
+                .setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.index(indexRequest, DEFAULT);
+            assertThat(internalClient, clusterContainTemplate(MUSICAL_INDEX_TEMPLATE));
+            assertThat(internalClient, clusterContainsDocument(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0003, documentId));
+            assertThat(internalClient, not(clusterContainsDocument(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0001, documentId)));
+            assertThat(internalClient, not(clusterContainsDocument(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0002, documentId)));
+        }
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_template/musical-index-template"));
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/song-transcription-jazz/_doc/000one"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutIndexTemplateRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "IndexRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(10, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    public void shouldUpdateTemplate_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            PutIndexTemplateRequest request = new PutIndexTemplateRequest(UNDELETABLE_TEMPLATE_NAME).patterns(
+                List.of(TEMPLATE_INDEX_PREFIX)
+            ).alias(new Alias(ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0003));
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().putTemplate(request, DEFAULT), statusException(FORBIDDEN));
+            assertThat(internalClient, clusterContainTemplateWithAlias(UNDELETABLE_TEMPLATE_NAME, ALIAS_FROM_UNDELETABLE_TEMPLATE));
+            assertThat(
+                internalClient,
+                not(clusterContainTemplateWithAlias(UNDELETABLE_TEMPLATE_NAME, ALIAS_USED_IN_MUSICAL_INDEX_TEMPLATE_0003))
+            );
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(PUT, "/_template/undeletable-template-name"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "PutIndexTemplateRequest"));
+    }
+
+    @Test
+    public void shouldGetFieldCapabilitiesForAllIndexes_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(ADMIN_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().fields(FIELD_TITLE);
+
+            FieldCapabilitiesResponse response = restHighLevelClient.fieldCaps(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response, containsExactlyIndices(SONG_INDEX_NAME, PROHIBITED_SONG_INDEX_NAME, UPDATE_DELETE_OPERATION_INDEX_NAME));
+            assertThat(response, numberOfFieldsIsEqualTo(1));
+            assertThat(response, containsFieldWithNameAndType(FIELD_TITLE, "text"));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(ADMIN_USER).withRestRequest(GET, "/_field_caps"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(ADMIN_USER, "FieldCapabilitiesRequest"));
+        auditLogsRule.assertExactly(3, grantedPrivilege(ADMIN_USER, "FieldCapabilitiesIndexRequest"));
+    }
+
+    @Test
+    public void shouldGetFieldCapabilitiesForAllIndexes_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().fields(FIELD_TITLE);
+
+            assertThatThrownBy(() -> restHighLevelClient.fieldCaps(request, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(GET, "/_field_caps"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "FieldCapabilitiesRequest"));
+    }
+
+    @Test
+    public void shouldGetFieldCapabilitiesForParticularIndex_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices(SONG_INDEX_NAME).fields(FIELD_TITLE);
+
+            FieldCapabilitiesResponse response = restHighLevelClient.fieldCaps(request, DEFAULT);
+
+            assertThat(response, notNullValue());
+            assertThat(response, containsExactlyIndices(SONG_INDEX_NAME));
+            assertThat(response, numberOfFieldsIsEqualTo(1));
+            assertThat(response, containsFieldWithNameAndType(FIELD_TITLE, "text"));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(GET, "/song_lyrics/_field_caps"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "FieldCapabilitiesRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_READ_USER, "FieldCapabilitiesIndexRequest"));
+    }
+
+    @Test
+    public void shouldGetFieldCapabilitiesForParticularIndex_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices(PROHIBITED_SONG_INDEX_NAME).fields(FIELD_TITLE);
+
+            assertThatThrownBy(() -> restHighLevelClient.fieldCaps(request, DEFAULT), statusException(FORBIDDEN));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(GET, "/prohibited_song_lyrics/_field_caps"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "FieldCapabilitiesRequest"));
+    }
+
+    @Test
+    public void shouldCreateSnapshotRepository_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            String snapshotDirPath = cluster.getSnapshotDirPath();
+
+            var response = steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotDirPath, "fs");
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, clusterContainsSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+    }
+
+    @Test
+    public void shouldCreateSnapshotRepository_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            String snapshotDirPath = cluster.getSnapshotDirPath();
+
+            assertThatThrownBy(
+                () -> steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotDirPath, "fs"),
+                statusException(FORBIDDEN)
+            );
+            assertThat(internalClient, not(clusterContainsSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_READ_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "PutRepositoryRequest"));
+    }
+
+    @Test
+    public void shouldDeleteSnapshotRepository_positive() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+            assertThat(internalClient, clusterContainsSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME));
+
+            var response = steps.deleteSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME);
+
+            assertThat(response, notNullValue());
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, not(clusterContainsSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME)));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(DELETE, "/_snapshot/test-snapshot-repository")
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "DeleteRepositoryRequest"));
+    }
+
+    @Test
+    public void shouldDeleteSnapshotRepository_negative() throws IOException {
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+
+            assertThatThrownBy(() -> steps.deleteSnapshotRepository(UNUSED_SNAPSHOT_REPOSITORY_NAME), statusException(FORBIDDEN));
+            assertThat(internalClient, clusterContainsSnapshotRepository(UNUSED_SNAPSHOT_REPOSITORY_NAME));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(DELETE, "/_snapshot/unused-snapshot-repository")
+        );
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "DeleteRepositoryRequest"));
+    }
+
+    @Test // Bug which can be reproduced with the below test: https://github.com/opensearch-project/security/issues/2169
+    public void shouldCreateSnapshot_positive() throws IOException {
+        final String snapshotName = "snapshot-positive-test";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+
+            CreateSnapshotResponse response = steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, SONG_INDEX_NAME);
+
+            assertThat(response, notNullValue());
+            assertThat(response.status(), equalTo(RestStatus.ACCEPTED));
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+            assertThat(internalClient, clusterContainSuccessSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository/snapshot-positive-test")
+        );
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(GET, "/_snapshot/test-snapshot-repository/snapshot-positive-test")
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+    }
+
+    @Test
+    public void shouldCreateSnapshot_negative() throws IOException {
+        final String snapshotName = "snapshot-negative-test";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+
+            assertThatThrownBy(
+                () -> steps.createSnapshot(UNUSED_SNAPSHOT_REPOSITORY_NAME, snapshotName, SONG_INDEX_NAME),
+                statusException(FORBIDDEN)
+            );
+
+            assertThat(internalClient, snapshotInClusterDoesNotExists(UNUSED_SNAPSHOT_REPOSITORY_NAME, snapshotName));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(PUT, "/_snapshot/unused-snapshot-repository/snapshot-negative-test")
+        );
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_READ_USER, "CreateSnapshotRequest"));
+    }
+
+    @Test
+    public void shouldDeleteSnapshot_positive() throws IOException {
+        String snapshotName = "delete-snapshot-positive";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            restHighLevelClient.snapshot();
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+            steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, SONG_INDEX_NAME);
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+
+            var response = steps.deleteSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+
+            assertThat(response.isAcknowledged(), equalTo(true));
+            assertThat(internalClient, snapshotInClusterDoesNotExists(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository/delete-snapshot-positive")
+        );
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(DELETE, "/_snapshot/test-snapshot-repository/delete-snapshot-positive")
+        );
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(GET, "/_snapshot/test-snapshot-repository/delete-snapshot-positive")
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "DeleteSnapshotRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+    }
+
+    @Test
+    public void shouldDeleteSnapshot_negative() throws IOException {
+        String snapshotName = "delete-snapshot-negative";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+            steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, SONG_INDEX_NAME);
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+        }
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            assertThatThrownBy(() -> steps.deleteSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName), statusException(FORBIDDEN));
+
+            assertThat(internalClient, clusterContainSuccessSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository/delete-snapshot-negative")
+        );
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(DELETE, "/_snapshot/test-snapshot-repository/delete-snapshot-negative")
+        );
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(GET, "/_snapshot/test-snapshot-repository/delete-snapshot-negative")
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertExactly(1, missingPrivilege(LIMITED_READ_USER, "DeleteSnapshotRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+    }
+
+    @Test
+    public void shouldRestoreSnapshot_positive() throws IOException {
+        final String snapshotName = "restore-snapshot-positive";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            // 1. create some documents
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Eins").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Zwei").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            // 2. create snapshot repository
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+
+            // 3. create snapshot
+            steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, WRITE_SONG_INDEX_NAME);
+
+            // 4. wait till snapshot is ready
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+
+            // 5. introduce some changes
+            bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Drei").source(SONGS[2].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Vier").source(SONGS[3].asMap()));
+            bulkRequest.add(new DeleteRequest(WRITE_SONG_INDEX_NAME, "Eins"));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            // 6. restore the snapshot
+            var response = steps.restoreSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, "(.+)", "restored_$1");
+
+            assertThat(response, notNullValue());
+            assertThat(response.status(), equalTo(ACCEPTED));
+
+            // 7. wait until snapshot is restored
+            CountRequest countRequest = new CountRequest(RESTORED_SONG_INDEX_NAME);
+            Awaitility.await()
+                .ignoreExceptions()
+                .alias("Index contains proper number of documents restored from snapshot.")
+                .until(() -> restHighLevelClient.count(countRequest, DEFAULT).getCount() == 2);
+
+            // 8. verify that document are present in restored index
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(RESTORED_SONG_INDEX_NAME, "Eins", FIELD_TITLE, TITLE_MAGNUM_OPUS)
+            );
+            assertThat(
+                internalClient,
+                clusterContainsDocumentWithFieldValue(RESTORED_SONG_INDEX_NAME, "Zwei", FIELD_TITLE, TITLE_SONG_1_PLUS_1)
+            );
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Drei")));
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Vier")));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository/restore-snapshot-positive")
+        );
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                POST,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-positive/_restore"
+            )
+        );
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/restored_write_song_index/_count"));
+        auditLogsRule.assertExactly(2, userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(GET, "/_snapshot/test-snapshot-repository/restore-snapshot-positive")
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "RestoreSnapshotRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "SearchRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    public void shouldRestoreSnapshot_failureForbiddenIndex() throws IOException {
+        final String snapshotName = "restore-snapshot-negative-forbidden-index";
+        String restoreToIndex = "forbidden_index";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            // 1. create some documents
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Eins").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Zwei").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            // 2. create snapshot repository
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+
+            // 3. create snapshot
+            steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, WRITE_SONG_INDEX_NAME);
+
+            // 4. wait till snapshot is ready
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+
+            // 5. restore the snapshot
+            assertThatThrownBy(
+                () -> steps.restoreSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, "(.+)", restoreToIndex),
+                statusException(FORBIDDEN)
+            );
+
+            // 6. verify that document are not present in restored index
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Eins")));
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Zwei")));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                PUT,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-index"
+            )
+        );
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                POST,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-index/_restore"
+            )
+        );
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                GET,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-index"
+            )
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "RestoreSnapshotRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+        auditLogsRule.assertExactlyOne(missingPrivilege(LIMITED_WRITE_USER, "RestoreSnapshotRequest"));
+    }
+
+    @Test
+    public void shouldRestoreSnapshot_failureOperationForbidden() throws IOException {
+        String snapshotName = "restore-snapshot-negative-forbidden-operation";
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_WRITE_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            // 1. create some documents
+            BulkRequest bulkRequest = new BulkRequest();
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Eins").source(SONGS[0].asMap()));
+            bulkRequest.add(new IndexRequest(WRITE_SONG_INDEX_NAME).id("Zwei").source(SONGS[1].asMap()));
+            bulkRequest.setRefreshPolicy(IMMEDIATE);
+            restHighLevelClient.bulk(bulkRequest, DEFAULT);
+
+            // 2. create snapshot repository
+            steps.createSnapshotRepository(TEST_SNAPSHOT_REPOSITORY_NAME, cluster.getSnapshotDirPath(), "fs");
+
+            // 3. create snapshot
+            steps.createSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, WRITE_SONG_INDEX_NAME);
+
+            // 4. wait till snapshot is ready
+            steps.waitForSnapshotCreation(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName);
+        }
+        // 5. restore the snapshot
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(LIMITED_READ_USER)) {
+            SnapshotSteps steps = new SnapshotSteps(restHighLevelClient);
+            assertThatThrownBy(
+                () -> steps.restoreSnapshot(TEST_SNAPSHOT_REPOSITORY_NAME, snapshotName, "(.+)", "restored_$1"),
+                statusException(FORBIDDEN)
+            );
+
+            // 6. verify that documents does not exist
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Eins")));
+            assertThat(internalClient, not(clusterContainsDocument(RESTORED_SONG_INDEX_NAME, "Zwei")));
+        }
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(PUT, "/_snapshot/test-snapshot-repository"));
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                PUT,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-operation"
+            )
+        );
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(LIMITED_READ_USER).withRestRequest(
+                POST,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-operation/_restore"
+            )
+        );
+        auditLogsRule.assertExactlyOne(userAuthenticated(LIMITED_WRITE_USER).withRestRequest(POST, "/_bulk"));
+        auditLogsRule.assertAtLeast(
+            1,
+            userAuthenticated(LIMITED_WRITE_USER).withRestRequest(
+                GET,
+                "/_snapshot/test-snapshot-repository/restore-snapshot-negative-forbidden-operation"
+            )
+        );
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "PutRepositoryRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateSnapshotRequest"));
+        auditLogsRule.assertExactlyOne(grantedPrivilege(LIMITED_WRITE_USER, "BulkRequest"));
+        auditLogsRule.assertExactly(2, grantedPrivilege(LIMITED_WRITE_USER, "CreateIndexRequest"));
+        auditLogsRule.assertExactly(4, grantedPrivilege(LIMITED_WRITE_USER, "PutMappingRequest"));
+        auditLogsRule.assertExactly(1, missingPrivilege(LIMITED_READ_USER, "RestoreSnapshotRequest"));
+        auditLogsRule.assertAtLeast(2, grantedPrivilege(LIMITED_WRITE_USER, "GetSnapshotsRequest"));
+        auditLogsRule.assertExactly(6, auditPredicate(INDEX_EVENT).withEffectiveUser(LIMITED_WRITE_USER));
+    }
+
+    @Test
+    // required permissions: "indices:admin/create"
+    public void createIndex_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("create_index_positive");
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
+            CreateIndexResponse createIndexResponse = restHighLevelClient.indices().create(createIndexRequest, DEFAULT);
+
+            assertThat(createIndexResponse, isSuccessfulCreateIndexResponse(indexName));
+            assertThat(cluster, indexExists(indexName));
+        }
+    }
+
+    @Test
+    public void createIndex_negative() throws IOException {
+        String indexName = "create_index_negative";
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().create(createIndexRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(indexName)));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/get"
+    public void checkIfIndexExists_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("index_exists_positive");
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            boolean exists = restHighLevelClient.indices().exists(new GetIndexRequest(indexName), DEFAULT);
+
+            assertThat(exists, is(false));
+        }
+    }
+
+    @Test
+    public void checkIfIndexExists_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "index_exists_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().exists(new GetIndexRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .exists(new GetIndexRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(() -> restHighLevelClient.indices().exists(new GetIndexRequest("*"), DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/delete"
+    public void deleteIndex_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("delete_index_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indexName);
+            var response = restHighLevelClient.indices().delete(deleteIndexRequest, DEFAULT);
+
+            assertThat(response.isAcknowledged(), is(true));
+            assertThat(cluster, not(indexExists(indexName)));
+        }
+    }
+
+    @Test
+    public void deleteIndex_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "delete_index_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().delete(new DeleteIndexRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .delete(new DeleteIndexRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().delete(new DeleteIndexRequest("*"), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: indices:admin/aliases, indices:admin/delete
+    public void shouldDeleteIndexByAliasRequest_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("delete_index_by_alias_request_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            IndicesAliasesRequest request = new IndicesAliasesRequest().addAliasAction(new AliasActions(REMOVE_INDEX).indices(indexName));
+
+            var response = restHighLevelClient.indices().updateAliases(request, DEFAULT);
+
+            assertThat(response.isAcknowledged(), is(true));
+            assertThat(cluster, not(indexExists(indexName)));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES).withRestRequest(POST, "/_aliases")
+        );
+        auditLogsRule.assertExactly(
+            2,
+            grantedPrivilege(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES, "IndicesAliasesRequest")
+        );
+        auditLogsRule.assertExactly(
+            2,
+            auditPredicate(INDEX_EVENT).withEffectiveUser(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES)
+        );
+    }
+
+    @Test
+    public void shouldDeleteIndexByAliasRequest_negative() throws IOException {
+        String indexName = "delete_index_by_alias_request_negative";
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            IndicesAliasesRequest request = new IndicesAliasesRequest().addAliasAction(new AliasActions(REMOVE_INDEX).indices(indexName));
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().updateAliases(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/get"
+    public void getIndex_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("get_index_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            GetIndexRequest getIndexRequest = new GetIndexRequest(indexName);
+            GetIndexResponse response = restHighLevelClient.indices().get(getIndexRequest, DEFAULT);
+
+            assertThat(response, getIndexResponseContainsIndices(indexName));
+        }
+    }
+
+    @Test
+    public void getIndex_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "get_index_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().get(new GetIndexRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().get(new GetIndexRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(() -> restHighLevelClient.indices().get(new GetIndexRequest("*"), DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/close", "indices:admin/close*"
+    public void closeIndex_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("close_index_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indexName);
+            CloseIndexResponse response = restHighLevelClient.indices().close(closeIndexRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulCloseIndexResponse());
+            assertThat(cluster, indexStateIsEqualTo(indexName, IndexMetadata.State.CLOSE));
+        }
+    }
+
+    @Test
+    public void closeIndex_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "close_index_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().close(new CloseIndexRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .close(new CloseIndexRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(() -> restHighLevelClient.indices().close(new CloseIndexRequest("*"), DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/open"
+    public void openIndex_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("open_index_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+        IndexOperationsHelper.closeIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            OpenIndexRequest closeIndexRequest = new OpenIndexRequest(indexName);
+            OpenIndexResponse response = restHighLevelClient.indices().open(closeIndexRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulOpenIndexResponse());
+            assertThat(cluster, indexStateIsEqualTo(indexName, IndexMetadata.State.OPEN));
+        }
+    }
+
+    @Test
+    public void openIndex_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "open_index_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().open(new OpenIndexRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .open(new OpenIndexRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(() -> restHighLevelClient.indices().open(new OpenIndexRequest("*"), DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    @Ignore
+    // required permissions: "indices:admin/resize", "indices:monitor/stats
+    // todo even when I assign the `indices:admin/resize` and `indices:monitor/stats` permissions to test user, this test fails.
+    // Issue: https://github.com/opensearch-project/security/issues/2141
+    public void shrinkIndex_positive() throws IOException {
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("shrink_index_positive_source");
+        Settings sourceIndexSettings = Settings.builder().put("index.blocks.write", true).put("index.number_of_shards", 2).build();
+        String targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("shrink_index_positive_target");
+        IndexOperationsHelper.createIndex(cluster, sourceIndexName, sourceIndexSettings);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+            ResizeResponse response = restHighLevelClient.indices().shrink(resizeRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulResizeResponse(targetIndexName));
+            assertThat(cluster, indexExists(targetIndexName));
+        }
+    }
+
+    @Test
+    public void shrinkIndex_negative() throws IOException {
+        // user cannot access target index
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("shrink_index_negative_source");
+        String targetIndexName = "shrink_index_negative_target";
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().shrink(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+
+        // user cannot access source index
+        sourceIndexName = "shrink_index_negative_source";
+        targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("shrink_index_negative_target");
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().shrink(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+    }
+
+    @Test
+    @Ignore
+    // required permissions: "indices:admin/resize", "indices:monitor/stats
+    // todo even when I assign the `indices:admin/resize` and `indices:monitor/stats` permissions to test user, this test fails.
+    // Issue: https://github.com/opensearch-project/security/issues/2141
+    public void cloneIndex_positive() throws IOException {
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("clone_index_positive_source");
+        Settings sourceIndexSettings = Settings.builder().put("index.blocks.write", true).build();
+        String targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("clone_index_positive_target");
+        IndexOperationsHelper.createIndex(cluster, sourceIndexName, sourceIndexSettings);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+            ResizeResponse response = restHighLevelClient.indices().clone(resizeRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulResizeResponse(targetIndexName));
+            assertThat(cluster, indexExists(targetIndexName));
+        }
+    }
+
+    @Test
+    public void cloneIndex_negative() throws IOException {
+        // user cannot access target index
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("clone_index_negative_source");
+        String targetIndexName = "clone_index_negative_target";
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().clone(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+
+        // user cannot access source index
+        sourceIndexName = "clone_index_negative_source";
+        targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("clone_index_negative_target");
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().clone(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+    }
+
+    @Test
+    @Ignore
+    // required permissions: "indices:admin/resize", "indices:monitor/stats
+    // todo even when I assign the `indices:admin/resize` and `indices:monitor/stats` permissions to test user, this test fails.
+    // Issue: https://github.com/opensearch-project/security/issues/2141
+    public void splitIndex_positive() throws IOException {
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("split_index_positive_source");
+        Settings sourceIndexSettings = Settings.builder().put("index.blocks.write", true).build();
+        String targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("split_index_positive_target");
+        IndexOperationsHelper.createIndex(cluster, sourceIndexName, sourceIndexSettings);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+            resizeRequest.setSettings(Settings.builder().put("index.number_of_shards", 2).build());
+            ResizeResponse response = restHighLevelClient.indices().split(resizeRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulResizeResponse(targetIndexName));
+            assertThat(cluster, indexExists(targetIndexName));
+        }
+    }
+
+    @Test
+    public void splitIndex_negative() throws IOException {
+        // user cannot access target index
+        String sourceIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("split_index_negative_source");
+        String targetIndexName = "split_index_negative_target";
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+            resizeRequest.setSettings(Settings.builder().put("index.number_of_shards", 2).build());
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().split(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+
+        // user cannot access source index
+        sourceIndexName = "split_index_negative_source";
+        targetIndexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("split_index_negative_target");
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ResizeRequest resizeRequest = new ResizeRequest(targetIndexName, sourceIndexName);
+            resizeRequest.setSettings(Settings.builder().put("index.number_of_shards", 2).build());
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().split(resizeRequest, DEFAULT), statusException(FORBIDDEN));
+            assertThat(cluster, not(indexExists(targetIndexName)));
+        }
+    }
+
+    @Test
+    // required permissions: "indices:monitor/settings/get"
+    public void getIndexSettings_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("get_index_settings_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indexName);
+            GetSettingsResponse response = restHighLevelClient.indices().getSettings(getSettingsRequest, DEFAULT);
+
+            assertThat(response, getSettingsResponseContainsIndices(indexName));
+        }
+    }
+
+    @Test
+    public void getIndexSettings_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "get_index_settings_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().getSettings(new GetSettingsRequest().indices(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .getSettings(new GetSettingsRequest().indices(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().getSettings(new GetSettingsRequest().indices("*"), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/settings/update"
+    public void updateIndexSettings_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("update_index_settings_positive");
+        Settings initialSettings = Settings.builder().put("index.number_of_replicas", "2").build();
+        Settings updatedSettings = Settings.builder().put("index.number_of_replicas", "4").build();
+        IndexOperationsHelper.createIndex(cluster, indexName, initialSettings);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexName).settings(updatedSettings);
+            var response = restHighLevelClient.indices().putSettings(updateSettingsRequest, DEFAULT);
+
+            assertThat(response.isAcknowledged(), is(true));
+            assertThat(cluster, indexSettingsContainValues(indexName, updatedSettings));
+        }
+    }
+
+    @Test
+    public void updateIndexSettings_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "update_index_settings_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        Settings settingsToUpdate = Settings.builder().put("index.number_of_replicas", 2).build();
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .putSettings(new UpdateSettingsRequest(indexThatUserHasNoAccessTo).settings(settingsToUpdate), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .putSettings(
+                        new UpdateSettingsRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo).settings(settingsToUpdate),
+                        DEFAULT
+                    ),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().putSettings(new UpdateSettingsRequest("*").settings(settingsToUpdate), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: indices:admin/mapping/put
+    public void createIndexMappings_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("create_index_mappings_positive");
+        Map<String, Object> indexMapping = Map.of("properties", Map.of("message", Map.of("type", "text")));
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            PutMappingRequest putMappingRequest = new PutMappingRequest(indexName).source(indexMapping);
+            var response = restHighLevelClient.indices().putMapping(putMappingRequest, DEFAULT);
+
+            assertThat(response.isAcknowledged(), is(true));
+            assertThat(cluster, indexMappingIsEqualTo(indexName, indexMapping));
+        }
+    }
+
+    @Test
+    public void createIndexMappings_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "create_index_mappings_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        Map<String, Object> indexMapping = Map.of("properties", Map.of("message", Map.of("type", "text")));
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .putMapping(new PutMappingRequest(indexThatUserHasNoAccessTo).source(indexMapping), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .putMapping(new PutMappingRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo).source(indexMapping), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().putMapping(new PutMappingRequest("*").source(indexMapping), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: indices:admin/mappings/get
+    public void getIndexMappings_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("get_index_mappings_positive");
+        Map<String, Object> indexMapping = Map.of("properties", Map.of("message", Map.of("type", "text")));
+        IndexOperationsHelper.createIndex(cluster, indexName);
+        IndexOperationsHelper.createMapping(cluster, indexName, indexMapping);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(indexName);
+            GetMappingsResponse response = restHighLevelClient.indices().getMapping(getMappingsRequest, DEFAULT);
+
+            assertThat(response, getMappingsResponseContainsIndices(indexName));
+        }
+    }
+
+    @Test
+    public void getIndexMappings_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "get_index_mappings_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().getMapping(new GetMappingsRequest().indices(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .getMapping(new GetMappingsRequest().indices(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().getMapping(new GetMappingsRequest().indices("*"), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/cache/clear"
+    public void clearIndexCache_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("clear_index_cache_positive");
+        IndexOperationsHelper.createIndex(cluster, indexName);
+
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(indexName);
+            ClearIndicesCacheResponse response = restHighLevelClient.indices().clearCache(clearIndicesCacheRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulClearIndicesCacheResponse());
+        }
+    }
+
+    @Test
+    public void clearIndexCache_negative() throws IOException {
+        String indexThatUserHasNoAccessTo = "clear_index_cache_negative";
+        String indexThatUserHasAccessTo = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat(indexThatUserHasNoAccessTo);
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().clearCache(new ClearIndicesCacheRequest(indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices()
+                    .clearCache(new ClearIndicesCacheRequest(indexThatUserHasAccessTo, indexThatUserHasNoAccessTo), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+            assertThatThrownBy(
+                () -> restHighLevelClient.indices().clearCache(new ClearIndicesCacheRequest("*"), DEFAULT),
+                statusException(FORBIDDEN)
+            );
+        }
+    }
+
+    @Test
+    // required permissions: "indices:admin/create", "indices:admin/aliases"
+    public void shouldCreateIndexWithAlias_positive() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("create_index_with_alias_positive");
+        try (
+            RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(
+                USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES
+            )
+        ) {
+            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName).alias(
+                new Alias(ALIAS_CREATE_INDEX_WITH_ALIAS_POSITIVE)
+            );
+
+            CreateIndexResponse createIndexResponse = restHighLevelClient.indices().create(createIndexRequest, DEFAULT);
+
+            assertThat(createIndexResponse, isSuccessfulCreateIndexResponse(indexName));
+            assertThat(cluster, indexExists(indexName));
+            assertThat(internalClient, aliasExists(ALIAS_CREATE_INDEX_WITH_ALIAS_POSITIVE));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES).withRestRequest(
+                PUT,
+                "/index_operations_create_index_with_alias_positive"
+            )
+        );
+        auditLogsRule.assertExactly(
+            2,
+            grantedPrivilege(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES, "CreateIndexRequest")
+        );
+        auditLogsRule.assertExactly(
+            2,
+            auditPredicate(INDEX_EVENT).withEffectiveUser(USER_ALLOWED_TO_PERFORM_INDEX_OPERATIONS_ON_SELECTED_INDICES)
+        );
+    }
+
+    @Test
+    public void shouldCreateIndexWithAlias_negative() throws IOException {
+        String indexName = INDICES_ON_WHICH_USER_CAN_PERFORM_INDEX_OPERATIONS_PREFIX.concat("create_index_with_alias_negative");
+        try (RestHighLevelClient restHighLevelClient = cluster.getRestHighLevelClient(USER_ALLOWED_TO_CREATE_INDEX)) {
+            CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName).alias(
+                new Alias(ALIAS_CREATE_INDEX_WITH_ALIAS_NEGATIVE)
+            );
+
+            assertThatThrownBy(() -> restHighLevelClient.indices().create(createIndexRequest, DEFAULT), statusException(FORBIDDEN));
+
+            assertThat(internalClient, not(aliasExists(ALIAS_CREATE_INDEX_WITH_ALIAS_NEGATIVE)));
+        }
+        auditLogsRule.assertExactlyOne(
+            userAuthenticated(USER_ALLOWED_TO_CREATE_INDEX).withRestRequest(PUT, "/index_operations_create_index_with_alias_negative")
+        );
+        auditLogsRule.assertExactlyOne(missingPrivilege(USER_ALLOWED_TO_CREATE_INDEX, "CreateIndexRequest"));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SecurityAdminLauncher.java b/src/integrationTest/java/org/opensearch/security/SecurityAdminLauncher.java
new file mode 100644
index 0000000000..164b2cb714
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SecurityAdminLauncher.java
@@ -0,0 +1,47 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.File;
+
+import org.opensearch.security.tools.SecurityAdmin;
+import org.opensearch.test.framework.certificate.TestCertificates;
+
+import static java.util.Objects.requireNonNull;
+
+class SecurityAdminLauncher {
+
+    private final TestCertificates certificates;
+    private int port;
+
+    public SecurityAdminLauncher(int port, TestCertificates certificates) {
+        this.port = port;
+        this.certificates = requireNonNull(certificates, "Certificates are required to communicate with cluster.");
+    }
+
+    public int updateRoleMappings(File roleMappingsConfigurationFile) throws Exception {
+        String[] commandLineArguments = {
+            "-cacert",
+            certificates.getRootCertificate().getAbsolutePath(),
+            "-cert",
+            certificates.getAdminCertificate().getAbsolutePath(),
+            "-key",
+            certificates.getAdminKey(null).getAbsolutePath(),
+            "-nhnv",
+            "-p",
+            String.valueOf(port),
+            "-f",
+            roleMappingsConfigurationFile.getAbsolutePath(),
+            "-t",
+            "rolesmapping" };
+
+        return SecurityAdmin.execute(commandLineArguments);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java b/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java
new file mode 100644
index 0000000000..b35495e23e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java
@@ -0,0 +1,232 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.awaitility.Awaitility;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+
+import org.opensearch.client.Client;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.security.support.ConfigConstants.SECURITY_BACKGROUND_INIT_IF_SECURITYINDEX_NOT_EXIST;
+import static org.opensearch.security.support.ConfigConstants.SECURITY_RESTAPI_ROLES_ENABLED;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class SecurityConfigurationTests {
+
+    private static final User USER_ADMIN = new User("admin").roles(ALL_ACCESS);
+    private static final User LIMITED_USER = new User("limited-user").roles(
+        new Role("limited-role").indexPermissions("indices:data/read/search", "indices:data/read/get").on("user-${user.name}")
+    );
+    public static final String LIMITED_USER_INDEX = "user-" + LIMITED_USER.getName();
+    public static final String ADDITIONAL_USER_1 = "additional00001";
+    public static final String ADDITIONAL_PASSWORD_1 = "user 1 fair password";
+
+    public static final String ADDITIONAL_USER_2 = "additional2";
+    public static final String ADDITIONAL_PASSWORD_2 = "user 2 fair password";
+    public static final String CREATE_USER_BODY = "{\"password\": \"%s\",\"opendistro_security_roles\": []}";
+    public static final String INTERNAL_USERS_RESOURCE = "_plugins/_security/api/internalusers/";
+    public static final String ID_1 = "one";
+    public static final String PROHIBITED_INDEX = "prohibited";
+    public static final String ID_2 = "two";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_ADMIN, LIMITED_USER)
+        .anonymousAuth(false)
+        .nodeSettings(
+            Map.of(
+                SECURITY_RESTAPI_ROLES_ENABLED,
+                List.of("user_" + USER_ADMIN.getName() + "__" + ALL_ACCESS.getName()),
+                SECURITY_BACKGROUND_INIT_IF_SECURITYINDEX_NOT_EXIST,
+                true
+            )
+        )
+        .build();
+
+    @Rule
+    public TemporaryFolder configurationDirectory = new TemporaryFolder();
+
+    @BeforeClass
+    public static void initData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(LIMITED_USER_INDEX).setId(ID_1).setRefreshPolicy(IMMEDIATE).setSource("foo", "bar").get();
+            client.prepareIndex(PROHIBITED_INDEX).setId(ID_2).setRefreshPolicy(IMMEDIATE).setSource("three", "four").get();
+        }
+    }
+
+    @Test
+    public void shouldCreateUserViaRestApi_success() {
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+            HttpResponse httpResponse = client.putJson(
+                INTERNAL_USERS_RESOURCE + ADDITIONAL_USER_1,
+                String.format(CREATE_USER_BODY, ADDITIONAL_PASSWORD_1)
+            );
+
+            assertThat(httpResponse.getStatusCode(), equalTo(201));
+        }
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+            client.assertCorrectCredentials(USER_ADMIN.getName());
+        }
+        try (TestRestClient client = cluster.getRestClient(ADDITIONAL_USER_1, ADDITIONAL_PASSWORD_1)) {
+            client.assertCorrectCredentials(ADDITIONAL_USER_1);
+        }
+    }
+
+    @Test
+    public void shouldCreateUserViaRestApi_failure() {
+        try (TestRestClient client = cluster.getRestClient(LIMITED_USER)) {
+            HttpResponse httpResponse = client.putJson(
+                INTERNAL_USERS_RESOURCE + ADDITIONAL_USER_1,
+                String.format(CREATE_USER_BODY, ADDITIONAL_PASSWORD_1)
+            );
+
+            httpResponse.assertStatusCode(403);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateAsAdminWithCertificate_positive() {
+        try (TestRestClient client = cluster.getRestClient(cluster.getAdminCertificate())) {
+            HttpResponse httpResponse = client.get("/_plugins/_security/whoami");
+
+            httpResponse.assertStatusCode(200);
+            assertThat(httpResponse.getTextFromJsonBody("/is_admin"), equalTo("true"));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateAsAdminWithCertificate_negativeSelfSignedCertificate() {
+        TestCertificates testCertificates = cluster.getTestCertificates();
+        try (TestRestClient client = cluster.getRestClient(testCertificates.createSelfSignedCertificate("CN=bond"))) {
+            HttpResponse httpResponse = client.get("/_plugins/_security/whoami");
+
+            httpResponse.assertStatusCode(200);
+            assertThat(httpResponse.getTextFromJsonBody("/is_admin"), equalTo("false"));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateAsAdminWithCertificate_negativeIncorrectDn() {
+        TestCertificates testCertificates = cluster.getTestCertificates();
+        try (TestRestClient client = cluster.getRestClient(testCertificates.createAdminCertificate("CN=non_admin"))) {
+            HttpResponse httpResponse = client.get("/_plugins/_security/whoami");
+
+            httpResponse.assertStatusCode(200);
+            assertThat(httpResponse.getTextFromJsonBody("/is_admin"), equalTo("false"));
+        }
+    }
+
+    @Test
+    public void shouldCreateUserViaRestApiWhenAdminIsAuthenticatedViaCertificate_positive() {
+        try (TestRestClient client = cluster.getRestClient(cluster.getAdminCertificate())) {
+
+            HttpResponse httpResponse = client.putJson(
+                INTERNAL_USERS_RESOURCE + ADDITIONAL_USER_2,
+                String.format(CREATE_USER_BODY, ADDITIONAL_PASSWORD_2)
+            );
+
+            httpResponse.assertStatusCode(201);
+        }
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+            client.assertCorrectCredentials(USER_ADMIN.getName());
+        }
+        try (TestRestClient client = cluster.getRestClient(ADDITIONAL_USER_2, ADDITIONAL_PASSWORD_2)) {
+            client.assertCorrectCredentials(ADDITIONAL_USER_2);
+        }
+    }
+
+    @Test
+    public void shouldCreateUserViaRestApiWhenAdminIsAuthenticatedViaCertificate_negative() {
+        TestCertificates testCertificates = cluster.getTestCertificates();
+        try (TestRestClient client = cluster.getRestClient(testCertificates.createSelfSignedCertificate("CN=attacker"))) {
+            HttpResponse httpResponse = client.putJson(
+                INTERNAL_USERS_RESOURCE + ADDITIONAL_USER_2,
+                String.format(CREATE_USER_BODY, ADDITIONAL_PASSWORD_2)
+            );
+
+            httpResponse.assertStatusCode(401);
+        }
+    }
+
+    @Test
+    public void shouldStillWorkAfterUpdateOfSecurityConfig() {
+        List<User> users = new ArrayList<>(cluster.getConfiguredUsers());
+        User newUser = new User("new-user");
+        users.add(newUser);
+
+        cluster.updateUserConfiguration(users);
+
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+            client.assertCorrectCredentials(USER_ADMIN.getName());
+        }
+        try (TestRestClient client = cluster.getRestClient(newUser)) {
+            client.assertCorrectCredentials(newUser.getName());
+        }
+    }
+
+    @Test
+    public void shouldAccessIndexWithPlaceholder_positive() {
+        try (TestRestClient client = cluster.getRestClient(LIMITED_USER)) {
+            HttpResponse httpResponse = client.get("/" + LIMITED_USER_INDEX + "/_doc/" + ID_1);
+
+            httpResponse.assertStatusCode(200);
+        }
+    }
+
+    @Test
+    public void shouldAccessIndexWithPlaceholder_negative() {
+        try (TestRestClient client = cluster.getRestClient(LIMITED_USER)) {
+            HttpResponse httpResponse = client.get("/" + PROHIBITED_INDEX + "/_doc/" + ID_2);
+
+            httpResponse.assertStatusCode(403);
+        }
+    }
+
+    @Test
+    public void shouldUseSecurityAdminTool() throws Exception {
+        SecurityAdminLauncher securityAdminLauncher = new SecurityAdminLauncher(cluster.getHttpPort(), cluster.getTestCertificates());
+        File rolesMapping = configurationDirectory.newFile("roles_mapping.yml");
+        ConfigurationFiles.createRoleMappingFile(rolesMapping);
+
+        int exitCode = securityAdminLauncher.updateRoleMappings(rolesMapping);
+
+        assertThat(exitCode, equalTo(0));
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+            Awaitility.await()
+                .alias("Waiting for rolemapping 'readall' availability.")
+                .until(() -> client.get("_plugins/_security/api/rolesmapping/readall").getStatusCode(), equalTo(200));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SecurityRolesTests.java b/src/integrationTest/java/org/opensearch/security/SecurityRolesTests.java
new file mode 100644
index 0000000000..ce2376c616
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SecurityRolesTests.java
@@ -0,0 +1,63 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.security;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpStatus;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class SecurityRolesTests {
+
+    protected final static TestSecurityConfig.User USER_SR = new TestSecurityConfig.User("sr_user").roles(
+        new Role("abc_ber").indexPermissions("*").on("*").clusterPermissions("*"),
+        new Role("def_efg").indexPermissions("*").on("*").clusterPermissions("*")
+    );
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(true)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_SR)
+        .build();
+
+    @Test
+    public void testSecurityRoles() throws Exception {
+        try (TestRestClient client = cluster.getRestClient(USER_SR)) {
+            HttpResponse response = client.getAuthInfo();
+            response.assertStatusCode(HttpStatus.SC_OK);
+
+            // Check username
+            assertThat(response.getTextFromJsonBody("/user_name"), equalTo("sr_user"));
+
+            // Check security roles
+            assertThat(response.getTextFromJsonBody("/roles/0"), equalTo("user_sr_user__abc_ber"));
+            assertThat(response.getTextFromJsonBody("/roles/1"), equalTo("user_sr_user__def_efg"));
+
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SnapshotSteps.java b/src/integrationTest/java/org/opensearch/security/SnapshotSteps.java
new file mode 100644
index 0000000000..28aa6abd43
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SnapshotSteps.java
@@ -0,0 +1,95 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.awaitility.Awaitility;
+
+import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
+import org.opensearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
+import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
+import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.opensearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
+import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
+import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
+import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
+import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.client.SnapshotClient;
+import org.opensearch.snapshots.SnapshotInfo;
+import org.opensearch.snapshots.SnapshotState;
+
+import static java.util.Objects.requireNonNull;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+
+class SnapshotSteps {
+
+    private final SnapshotClient snapshotClient;
+
+    public SnapshotSteps(RestHighLevelClient restHighLevelClient) {
+        this.snapshotClient = requireNonNull(restHighLevelClient, "Rest high level client is required.").snapshot();
+    }
+
+    // CS-SUPPRESS-SINGLE: RegexpSingleline It is not possible to use phrase "cluster manager" instead of master here
+    public org.opensearch.action.support.master.AcknowledgedResponse createSnapshotRepository(
+        String repositoryName,
+        String snapshotDirPath,
+        String type
+    )
+        // CS-ENFORCE-SINGLE
+        throws IOException {
+        PutRepositoryRequest createRepositoryRequest = new PutRepositoryRequest().name(repositoryName)
+            .type(type)
+            .settings(Map.of("location", snapshotDirPath));
+        return snapshotClient.createRepository(createRepositoryRequest, DEFAULT);
+    }
+
+    public CreateSnapshotResponse createSnapshot(String repositoryName, String snapshotName, String... indices) throws IOException {
+        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repositoryName, snapshotName).indices(indices);
+        return snapshotClient.create(createSnapshotRequest, DEFAULT);
+    }
+
+    public void waitForSnapshotCreation(String repositoryName, String snapshotName) {
+        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repositoryName, new String[] { snapshotName });
+        Awaitility.await().alias("wait for snapshot creation").ignoreExceptions().until(() -> {
+            GetSnapshotsResponse snapshotsResponse = snapshotClient.get(getSnapshotsRequest, DEFAULT);
+            SnapshotInfo snapshotInfo = snapshotsResponse.getSnapshots().get(0);
+            return SnapshotState.SUCCESS.equals(snapshotInfo.state());
+        });
+    }
+
+    // CS-SUPPRESS-SINGLE: RegexpSingleline It is not possible to use phrase "cluster manager" instead of master here
+    public org.opensearch.action.support.master.AcknowledgedResponse deleteSnapshotRepository(String repositoryName) throws IOException {
+        // CS-ENFORCE-SINGLE
+        DeleteRepositoryRequest request = new DeleteRepositoryRequest(repositoryName);
+        return snapshotClient.deleteRepository(request, DEFAULT);
+    }
+
+    // CS-SUPPRESS-SINGLE: RegexpSingleline It is not possible to use phrase "cluster manager" instead of master here
+    public org.opensearch.action.support.master.AcknowledgedResponse deleteSnapshot(String repositoryName, String snapshotName)
+        throws IOException {
+        // CS-ENFORCE-SINGLE
+        return snapshotClient.delete(new DeleteSnapshotRequest(repositoryName, snapshotName), DEFAULT);
+    }
+
+    public RestoreSnapshotResponse restoreSnapshot(
+        String repositoryName,
+        String snapshotName,
+        String renamePattern,
+        String renameReplacement
+    ) throws IOException {
+        RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repositoryName, snapshotName).renamePattern(
+            renamePattern
+        ).renameReplacement(renameReplacement);
+        return snapshotClient.restore(restoreSnapshotRequest, DEFAULT);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/Song.java b/src/integrationTest/java/org/opensearch/security/Song.java
new file mode 100644
index 0000000000..b7e6c4ef05
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/Song.java
@@ -0,0 +1,98 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.util.Map;
+import java.util.Objects;
+
+public class Song {
+
+    public static final String FIELD_TITLE = "title";
+    public static final String FIELD_ARTIST = "artist";
+    public static final String FIELD_LYRICS = "lyrics";
+    public static final String FIELD_STARS = "stars";
+    public static final String FIELD_GENRE = "genre";
+    public static final String ARTIST_FIRST = "First artist";
+    public static final String ARTIST_STRING = "String";
+    public static final String ARTIST_TWINS = "Twins";
+    public static final String TITLE_MAGNUM_OPUS = "Magnum Opus";
+    public static final String TITLE_SONG_1_PLUS_1 = "Song 1+1";
+    public static final String TITLE_NEXT_SONG = "Next song";
+    public static final String ARTIST_NO = "No!";
+    public static final String TITLE_POISON = "Poison";
+
+    public static final String ARTIST_YES = "yes";
+
+    public static final String TITLE_AFFIRMATIVE = "Affirmative";
+
+    public static final String ARTIST_UNKNOWN = "unknown";
+    public static final String TITLE_CONFIDENTIAL = "confidential";
+
+    public static final String LYRICS_1 = "Very deep subject";
+    public static final String LYRICS_2 = "Once upon a time";
+    public static final String LYRICS_3 = "giant nonsense";
+    public static final String LYRICS_4 = "Much too much";
+    public static final String LYRICS_5 = "Little to little";
+    public static final String LYRICS_6 = "confidential secret classified";
+
+    public static final String GENRE_ROCK = "rock";
+    public static final String GENRE_JAZZ = "jazz";
+    public static final String GENRE_BLUES = "blues";
+
+    public static final String QUERY_TITLE_NEXT_SONG = FIELD_TITLE + ":" + "\"" + TITLE_NEXT_SONG + "\"";
+    public static final String QUERY_TITLE_POISON = FIELD_TITLE + ":" + TITLE_POISON;
+    public static final String QUERY_TITLE_MAGNUM_OPUS = FIELD_TITLE + ":" + TITLE_MAGNUM_OPUS;
+
+    public static final Song[] SONGS = {
+        new Song(ARTIST_FIRST, TITLE_MAGNUM_OPUS, LYRICS_1, 1, GENRE_ROCK),
+        new Song(ARTIST_STRING, TITLE_SONG_1_PLUS_1, LYRICS_2, 2, GENRE_BLUES),
+        new Song(ARTIST_TWINS, TITLE_NEXT_SONG, LYRICS_3, 3, GENRE_JAZZ),
+        new Song(ARTIST_NO, TITLE_POISON, LYRICS_4, 4, GENRE_ROCK),
+        new Song(ARTIST_YES, TITLE_AFFIRMATIVE, LYRICS_5, 5, GENRE_BLUES),
+        new Song(ARTIST_UNKNOWN, TITLE_CONFIDENTIAL, LYRICS_6, 6, GENRE_JAZZ) };
+
+    private final String artist;
+    private final String title;
+    private final String lyrics;
+    private final Integer stars;
+    private final String genre;
+
+    public Song(String artist, String title, String lyrics, Integer stars, String genre) {
+        this.artist = Objects.requireNonNull(artist, "Artist is required");
+        this.title = Objects.requireNonNull(title, "Title is required");
+        this.lyrics = Objects.requireNonNull(lyrics, "Lyrics is required");
+        this.stars = Objects.requireNonNull(stars, "Stars field is required");
+        this.genre = Objects.requireNonNull(genre, "Genre field is required");
+    }
+
+    public String getArtist() {
+        return artist;
+    }
+
+    public String getTitle() {
+        return title;
+    }
+
+    public String getLyrics() {
+        return lyrics;
+    }
+
+    public Integer getStars() {
+        return stars;
+    }
+
+    public String getGenre() {
+        return genre;
+    }
+
+    public Map<String, Object> asMap() {
+        return Map.of(FIELD_ARTIST, artist, FIELD_TITLE, title, FIELD_LYRICS, lyrics, FIELD_STARS, stars, FIELD_GENRE, genre);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/SslOnlyTests.java b/src/integrationTest/java/org/opensearch/security/SslOnlyTests.java
new file mode 100644
index 0000000000..25feffb2b4
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/SslOnlyTests.java
@@ -0,0 +1,69 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.security.support.ConfigConstants;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+/**
+* Test related to SSL-only mode of security plugin. In this mode, the security plugin is responsible only for TLS/SSL encryption.
+* Therefore, the plugin does not perform authentication and authorization. Moreover, the REST resources (e.g. /_plugins/_security/whoami,
+* /_plugins/_security/authinfo, etc.) provided by the plugin are not available.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class SslOnlyTests {
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .loadConfigurationIntoIndex(false)
+        .nodeSettings(Map.of(ConfigConstants.SECURITY_SSL_ONLY, true))
+        .sslOnly(true)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .build();
+
+    @Test
+    public void shouldNotLoadSecurityPluginResources() {
+        try (TestRestClient client = cluster.getRestClient()) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            // in SSL only mode the security plugin does not register a handler for resource /_plugins/_security/whoami. Therefore error
+            // response is returned.
+            response.assertStatusCode(400);
+        }
+    }
+
+    @Test
+    public void shouldGetIndicesWithoutAuthentication() {
+        try (TestRestClient client = cluster.getRestClient()) {
+
+            // request does not contains credential
+            HttpResponse response = client.get("/_cat/indices");
+
+            // successful response is returned because the security plugin in SSL only mode
+            // does not perform authentication and authorization
+            response.assertStatusCode(200);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/TlsTests.java b/src/integrationTest/java/org/opensearch/security/TlsTests.java
new file mode 100644
index 0000000000..91c81cf04c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/TlsTests.java
@@ -0,0 +1,106 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import javax.net.ssl.SSLHandshakeException;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.NoHttpResponseException;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.security.auditlog.impl.AuditCategory;
+import org.opensearch.test.framework.AuditCompliance;
+import org.opensearch.test.framework.AuditConfiguration;
+import org.opensearch.test.framework.AuditFilters;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.audit.AuditLogsRule;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.opensearch.security.auditlog.AuditLog.Origin.REST;
+import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED_CIPHERS;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.auditPredicate;
+import static org.opensearch.test.framework.cluster.TestRestClientConfiguration.getBasicAuthHeader;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class TlsTests {
+
+    private static final User USER_ADMIN = new User("admin").roles(ALL_ACCESS);
+
+    public static final String SUPPORTED_CIPHER_SUIT = "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256";
+    public static final String NOT_SUPPORTED_CIPHER_SUITE = "TLS_RSA_WITH_AES_128_CBC_SHA";
+    public static final String AUTH_INFO_ENDPOINT = "/_opendistro/_security/authinfo?pretty";
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .nodeSettings(Map.of(SECURITY_SSL_HTTP_ENABLED_CIPHERS, List.of(SUPPORTED_CIPHER_SUIT)))
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_ADMIN)
+        .audit(
+            new AuditConfiguration(true).compliance(new AuditCompliance().enabled(true))
+                .filters(new AuditFilters().enabledRest(true).enabledTransport(true))
+        )
+        .build();
+
+    @Rule
+    public AuditLogsRule auditLogsRule = new AuditLogsRule();
+
+    @Test
+    public void shouldCreateAuditOnIncomingNonTlsConnection() throws IOException {
+        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
+            HttpGet request = new HttpGet("http://localhost:" + cluster.getHttpPort());
+
+            assertThatThrownBy(() -> httpClient.execute(request), instanceOf(NoHttpResponseException.class));
+        }
+        auditLogsRule.assertAtLeast(1, auditPredicate(AuditCategory.SSL_EXCEPTION).withLayer(REST));
+    }
+
+    @Test
+    public void shouldSupportClientCipherSuite_positive() throws IOException {
+        try (CloseableHttpClient client = cluster.getClosableHttpClient(new String[] { SUPPORTED_CIPHER_SUIT })) {
+            HttpGet httpGet = new HttpGet("https://localhost:" + cluster.getHttpPort() + AUTH_INFO_ENDPOINT);
+            httpGet.addHeader(getBasicAuthHeader(USER_ADMIN.getName(), USER_ADMIN.getPassword()));
+
+            try (CloseableHttpResponse response = client.execute(httpGet)) {
+
+                int responseStatusCode = response.getStatusLine().getStatusCode();
+                assertThat(responseStatusCode, equalTo(200));
+            }
+        }
+    }
+
+    @Test
+    public void shouldSupportClientCipherSuite_negative() throws IOException {
+        try (CloseableHttpClient client = cluster.getClosableHttpClient(new String[] { NOT_SUPPORTED_CIPHER_SUITE })) {
+            HttpGet httpGet = new HttpGet("https://localhost:" + cluster.getHttpPort() + AUTH_INFO_ENDPOINT);
+
+            assertThatThrownBy(() -> client.execute(httpGet), instanceOf(SSLHandshakeException.class));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/UserBruteForceAttacksPreventionTests.java b/src/integrationTest/java/org/opensearch/security/UserBruteForceAttacksPreventionTests.java
new file mode 100644
index 0000000000..cb61950ada
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/UserBruteForceAttacksPreventionTests.java
@@ -0,0 +1,133 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security;
+
+import java.util.concurrent.TimeUnit;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.AuthFailureListeners;
+import org.opensearch.test.framework.RateLimiting;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.apache.http.HttpStatus.SC_OK;
+import static org.apache.http.HttpStatus.SC_UNAUTHORIZED;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class UserBruteForceAttacksPreventionTests {
+
+    private static final User USER_1 = new User("simple-user-1").roles(ALL_ACCESS);
+    private static final User USER_2 = new User("simple-user-2").roles(ALL_ACCESS);
+    private static final User USER_3 = new User("simple-user-3").roles(ALL_ACCESS);
+    private static final User USER_4 = new User("simple-user-4").roles(ALL_ACCESS);
+    private static final User USER_5 = new User("simple-user-5").roles(ALL_ACCESS);
+
+    public static final int ALLOWED_TRIES = 3;
+    public static final int TIME_WINDOW_SECONDS = 3;
+    private static final AuthFailureListeners listener = new AuthFailureListeners().addRateLimit(
+        new RateLimiting("internal_authentication_backend_limiting").type("username")
+            .authenticationBackend("intern")
+            .allowedTries(ALLOWED_TRIES)
+            .timeWindowSeconds(TIME_WINDOW_SECONDS)
+            .blockExpirySeconds(2)
+            .maxBlockedClients(500)
+            .maxTrackedClients(500)
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authFailureListeners(listener)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_1, USER_2, USER_3, USER_4, USER_5)
+        .build();
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("org.opensearch.security.auth.BackendRegistry");
+
+    @Test
+    public void shouldAuthenticateUserWhenBlockadeIsNotActive() {
+        try (TestRestClient client = cluster.getRestClient(USER_1)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldBlockUserWhenNumberOfFailureLoginAttemptIsEqualToLimit() {
+        authenticateUserWithIncorrectPassword(USER_2, ALLOWED_TRIES);
+        try (TestRestClient client = cluster.getRestClient(USER_2)) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+        // Rejecting REST request because of blocked user:
+        logsRule.assertThatContain("Rejecting REST request because of blocked user: " + USER_2.getName());
+    }
+
+    @Test
+    public void shouldBlockUserWhenNumberOfFailureLoginAttemptIsGreaterThanLimit() {
+        authenticateUserWithIncorrectPassword(USER_3, ALLOWED_TRIES * 2);
+        try (TestRestClient client = cluster.getRestClient(USER_3)) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+        logsRule.assertThatContain("Rejecting REST request because of blocked user: " + USER_3.getName());
+    }
+
+    @Test
+    public void shouldNotBlockUserWhenNumberOfLoginAttemptIsBelowLimit() {
+        authenticateUserWithIncorrectPassword(USER_4, ALLOWED_TRIES - 1);
+        try (TestRestClient client = cluster.getRestClient(USER_4)) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldReleaseLock() throws InterruptedException {
+        authenticateUserWithIncorrectPassword(USER_5, ALLOWED_TRIES);
+        try (TestRestClient client = cluster.getRestClient(USER_5)) {
+            HttpResponse response = client.getAuthInfo();
+            response.assertStatusCode(SC_UNAUTHORIZED);
+            TimeUnit.SECONDS.sleep(TIME_WINDOW_SECONDS);
+
+            response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+        logsRule.assertThatContain("Rejecting REST request because of blocked user: " + USER_5.getName());
+    }
+
+    private static void authenticateUserWithIncorrectPassword(User user, int numberOfAttempts) {
+        try (TestRestClient client = cluster.getRestClient(user.getName(), "incorrect password")) {
+            for (int i = 0; i < numberOfAttempts; ++i) {
+                HttpResponse response = client.getAuthInfo();
+                response.assertStatusCode(SC_UNAUTHORIZED);
+            }
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoTest.java b/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoTest.java
index daf66cdc90..066faa8544 100644
--- a/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoTest.java
+++ b/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoTest.java
@@ -12,6 +12,7 @@
 package org.opensearch.security.api;
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpStatus;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/src/integrationTest/java/org/opensearch/security/http/AnonymousAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/AnonymousAuthenticationTest.java
new file mode 100644
index 0000000000..b1c13aeedc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/AnonymousAuthenticationTest.java
@@ -0,0 +1,129 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+
+import com.carrotsearch.randomizedtesting.RandomizedRunner;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.RolesMapping;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+@RunWith(RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class AnonymousAuthenticationTest {
+
+    private static final String DEFAULT_ANONYMOUS_USER_NAME = "opendistro_security_anonymous";
+    private static final String DEFAULT_ANONYMOUS_USER_BACKEND_ROLE_NAME = "opendistro_security_anonymous_backendrole";
+
+    /**
+    * Custom role assigned to the anonymous user via {@link #ANONYMOUS_USER_CUSTOM_ROLE_MAPPING}
+    */
+    private static final TestSecurityConfig.Role ANONYMOUS_USER_CUSTOM_ROLE = new TestSecurityConfig.Role("anonymous_user_custom_role");
+
+    /**
+    * Maps {@link #ANONYMOUS_USER_CUSTOM_ROLE} to {@link #DEFAULT_ANONYMOUS_USER_BACKEND_ROLE_NAME}
+    */
+    private static final RolesMapping ANONYMOUS_USER_CUSTOM_ROLE_MAPPING = new RolesMapping(ANONYMOUS_USER_CUSTOM_ROLE).backendRoles(
+        DEFAULT_ANONYMOUS_USER_BACKEND_ROLE_NAME
+    );
+
+    /**
+    * User who is stored in the internal user database and can authenticate
+    */
+    private static final TestSecurityConfig.User EXISTING_USER = new TestSecurityConfig.User("existing_user").roles(
+        new TestSecurityConfig.Role("existing_user")
+    );
+
+    /**
+    * User who is not stored in the internal user database and can not authenticate
+    */
+    private static final TestSecurityConfig.User NOT_EXISTING_USER = new TestSecurityConfig.User("not_existing_user").roles(
+        new TestSecurityConfig.Role("not_existing_user")
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(true)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(EXISTING_USER)
+        .roles(ANONYMOUS_USER_CUSTOM_ROLE)
+        .rolesMapping(ANONYMOUS_USER_CUSTOM_ROLE_MAPPING)
+        .build();
+
+    private static final String USER_NAME_POINTER = "/user_name";
+    private static final String BACKEND_ROLES_POINTER = "/backend_roles";
+    private static final String ROLES_POINTER = "/roles";
+
+    @Test
+    public void shouldAuthenticate_positive_anonymousUser() {
+        try (TestRestClient client = cluster.getRestClient()) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+
+            String username = response.getTextFromJsonBody(USER_NAME_POINTER);
+            assertThat(username, equalTo(DEFAULT_ANONYMOUS_USER_NAME));
+
+            List<String> backendRoles = response.getTextArrayFromJsonBody(BACKEND_ROLES_POINTER);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, contains(DEFAULT_ANONYMOUS_USER_BACKEND_ROLE_NAME));
+
+            List<String> roles = response.getTextArrayFromJsonBody(ROLES_POINTER);
+            assertThat(roles, hasSize(1));
+            assertThat(roles, contains(ANONYMOUS_USER_CUSTOM_ROLE.getName()));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticate_positive_existingUser() {
+        try (TestRestClient client = cluster.getRestClient(EXISTING_USER)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+
+            String username = response.getTextFromJsonBody(USER_NAME_POINTER);
+            assertThat(username, equalTo(EXISTING_USER.getName()));
+
+            List<String> backendRoles = response.getTextArrayFromJsonBody(BACKEND_ROLES_POINTER);
+            assertThat(backendRoles, hasSize(0));
+
+            List<String> roles = response.getTextArrayFromJsonBody(ROLES_POINTER);
+            assertThat(roles, hasSize(EXISTING_USER.getRoleNames().size()));
+            assertThat(roles, containsInAnyOrder(EXISTING_USER.getRoleNames().toArray()));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticate_negative_notExistingUser() {
+        try (TestRestClient client = cluster.getRestClient(NOT_EXISTING_USER)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/AuthInfo.java b/src/integrationTest/java/org/opensearch/security/http/AuthInfo.java
new file mode 100644
index 0000000000..53ea6ab859
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/AuthInfo.java
@@ -0,0 +1,30 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.beans.ConstructorProperties;
+import java.util.List;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+class AuthInfo {
+
+    private final List<String> customAttributeNames;
+
+    @ConstructorProperties("custom_attribute_names")
+    public AuthInfo(List<String> customAttributeNames) {
+        this.customAttributeNames = customAttributeNames;
+    }
+
+    public List<String> getCustomAttributeNames() {
+        return customAttributeNames;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/BasicAuthTests.java b/src/integrationTest/java/org/opensearch/security/http/BasicAuthTests.java
new file mode 100644
index 0000000000..f6b1672bbe
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/BasicAuthTests.java
@@ -0,0 +1,146 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpHeaders;
+import org.hamcrest.Matchers;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.apache.http.HttpStatus.SC_OK;
+import static org.apache.http.HttpStatus.SC_UNAUTHORIZED;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsStringIgnoringCase;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class BasicAuthTests {
+    static final User TEST_USER = new User("test_user").password("s3cret");
+
+    public static final String CUSTOM_ATTRIBUTE_NAME = "superhero";
+    static final User SUPER_USER = new User("super-user").password("super-password").attr(CUSTOM_ATTRIBUTE_NAME, true);
+    public static final String NOT_EXISTING_USER = "not-existing-user";
+    public static final String INVALID_PASSWORD = "secret-password";
+
+    public static final AuthcDomain AUTHC_DOMAIN = new AuthcDomain("basic", 0).httpAuthenticatorWithChallenge("basic").backend("internal");
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(AUTHC_DOMAIN)
+        .users(TEST_USER, SUPER_USER)
+        .build();
+
+    @Test
+    public void shouldRespondWith401WhenUserDoesNotExist() {
+        try (TestRestClient client = cluster.getRestClient(NOT_EXISTING_USER, INVALID_PASSWORD)) {
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+    }
+
+    @Test
+    public void shouldRespondWith401WhenUserNameIsIncorrect() {
+        try (TestRestClient client = cluster.getRestClient(NOT_EXISTING_USER, TEST_USER.getPassword())) {
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+    }
+
+    @Test
+    public void shouldRespondWith401WhenPasswordIsIncorrect() {
+        try (TestRestClient client = cluster.getRestClient(TEST_USER.getName(), INVALID_PASSWORD)) {
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+    }
+
+    @Test
+    public void shouldRespondWith200WhenCredentialsAreCorrect() {
+        try (TestRestClient client = cluster.getRestClient(TEST_USER)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void testBrowserShouldRequestForCredentials() {
+        try (TestRestClient client = cluster.getRestClient()) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_UNAUTHORIZED);
+            assertThatBrowserAskUserForCredentials(response);
+        }
+    }
+
+    @Test
+    public void testUserShouldNotHaveAssignedCustomAttributes() {
+        try (TestRestClient client = cluster.getRestClient(TEST_USER)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_OK);
+            AuthInfo authInfo = response.getBodyAs(AuthInfo.class);
+            assertThat(authInfo, is(notNullValue()));
+            assertThat(authInfo.getCustomAttributeNames(), is(notNullValue()));
+            assertThat(authInfo.getCustomAttributeNames(), hasSize(0));
+        }
+    }
+
+    @Test
+    public void testUserShouldHaveAssignedCustomAttributes() {
+        try (TestRestClient client = cluster.getRestClient(SUPER_USER)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            assertThat(response, is(notNullValue()));
+            response.assertStatusCode(SC_OK);
+            AuthInfo authInfo = response.getBodyAs(AuthInfo.class);
+            assertThat(authInfo, is(notNullValue()));
+            List<String> customAttributeNames = authInfo.getCustomAttributeNames();
+            assertThat(customAttributeNames, is(notNullValue()));
+            assertThat(customAttributeNames, hasSize(1));
+            assertThat(customAttributeNames.get(0), Matchers.equalTo("attr.internal." + CUSTOM_ATTRIBUTE_NAME));
+        }
+    }
+
+    private void assertThatBrowserAskUserForCredentials(HttpResponse response) {
+        String reason = "Browser does not ask user for credentials";
+        assertThat(reason, response.containHeader(HttpHeaders.WWW_AUTHENTICATE), equalTo(true));
+        assertThat(response.getHeader(HttpHeaders.WWW_AUTHENTICATE).getValue(), containsStringIgnoringCase("basic"));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/BasicAuthWithoutChallengeTests.java b/src/integrationTest/java/org/opensearch/security/http/BasicAuthWithoutChallengeTests.java
new file mode 100644
index 0000000000..d602d0920b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/BasicAuthWithoutChallengeTests.java
@@ -0,0 +1,52 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpHeaders;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL_WITHOUT_CHALLENGE;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class BasicAuthWithoutChallengeTests {
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(AUTHC_HTTPBASIC_INTERNAL_WITHOUT_CHALLENGE)
+        .build();
+
+    @Test
+    public void browserShouldNotRequestUserForCredentials() {
+        try (TestRestClient client = cluster.getRestClient()) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            assertThatBrowserDoesNotAskUserForCredentials(response);
+        }
+    }
+
+    private void assertThatBrowserDoesNotAskUserForCredentials(HttpResponse response) {
+        String reason = "Browser asked user for credentials which is not expected";
+        assertThat(reason, response.containHeader(HttpHeaders.WWW_AUTHENTICATE), equalTo(false));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/CertificateAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/CertificateAuthenticationTest.java
new file mode 100644
index 0000000000..7c4d05b714
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/CertificateAuthenticationTest.java
@@ -0,0 +1,148 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.RolesMapping;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.certificate.CertificateData;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.apache.http.HttpStatus.SC_OK;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.hasSize;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class CertificateAuthenticationTest {
+
+    private static final User USER_ADMIN = new User("admin").roles(ALL_ACCESS);
+
+    public static final String POINTER_BACKEND_ROLES = "/backend_roles";
+    public static final String POINTER_ROLES = "/roles";
+
+    private static final String USER_SPOCK = "spock";
+    private static final String USER_KIRK = "kirk";
+
+    private static final String BACKEND_ROLE_BRIDGE = "bridge";
+    private static final String BACKEND_ROLE_CAPTAIN = "captain";
+
+    private static final Role ROLE_ALL_INDEX_SEARCH = new Role("all-index-search").indexPermissions("indices:data/read/search").on("*");
+
+    private static final Map<String, Object> CERT_AUTH_CONFIG = Map.of("username_attribute", "cn", "roles_attribute", "ou");
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().nodeSettings(
+        Map.of("plugins.security.ssl.http.clientauth_mode", "OPTIONAL")
+    )
+        .clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .anonymousAuth(false)
+        .authc(
+            new AuthcDomain("clientcert_auth_domain", -1, true).httpAuthenticator(
+                new HttpAuthenticator("clientcert").challenge(false).config(CERT_AUTH_CONFIG)
+            ).backend("noop")
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .roles(ROLE_ALL_INDEX_SEARCH)
+        .users(USER_ADMIN)
+        .rolesMapping(new RolesMapping(ROLE_ALL_INDEX_SEARCH).backendRoles(BACKEND_ROLE_BRIDGE))
+        .build();
+
+    private static final TestCertificates TEST_CERTIFICATES = cluster.getTestCertificates();
+
+    @Test
+    public void shouldAuthenticateUserWithBasicAuthWhenCertificateAuthenticationIsConfigured() {
+        try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_OK);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithCertificate_positiveUserSpoke() {
+        CertificateData userSpockCertificate = TEST_CERTIFICATES.issueUserCertificate(BACKEND_ROLE_BRIDGE, USER_SPOCK);
+        try (TestRestClient client = cluster.getRestClient(userSpockCertificate)) {
+
+            client.assertCorrectCredentials(USER_SPOCK);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithCertificate_positiveUserKirk() {
+        CertificateData userSpockCertificate = TEST_CERTIFICATES.issueUserCertificate(BACKEND_ROLE_BRIDGE, USER_KIRK);
+        try (TestRestClient client = cluster.getRestClient(userSpockCertificate)) {
+
+            client.assertCorrectCredentials(USER_KIRK);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithCertificate_negative() {
+        CertificateData untrustedUserCertificate = TEST_CERTIFICATES.createSelfSignedCertificate("CN=untrusted");
+        try (TestRestClient client = cluster.getRestClient(untrustedUserCertificate)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+    }
+
+    @Test
+    public void shouldRetrieveBackendRoleFromCertificate_positiveRoleBridge() {
+        CertificateData userSpockCertificate = TEST_CERTIFICATES.issueUserCertificate(BACKEND_ROLE_BRIDGE, USER_KIRK);
+        try (TestRestClient client = cluster.getRestClient(userSpockCertificate)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, containsInAnyOrder(BACKEND_ROLE_BRIDGE));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(1));
+            assertThat(roles, containsInAnyOrder(ROLE_ALL_INDEX_SEARCH.getName()));
+        }
+    }
+
+    @Test
+    public void shouldRetrieveBackendRoleFromCertificate_positiveRoleCaptain() {
+        CertificateData userSpockCertificate = TEST_CERTIFICATES.issueUserCertificate(BACKEND_ROLE_CAPTAIN, USER_KIRK);
+        try (TestRestClient client = cluster.getRestClient(userSpockCertificate)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, containsInAnyOrder(BACKEND_ROLE_CAPTAIN));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(0));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/CommonProxyAuthenticationTests.java b/src/integrationTest/java/org/opensearch/security/http/CommonProxyAuthenticationTests.java
new file mode 100644
index 0000000000..49ded4f2a9
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/CommonProxyAuthenticationTests.java
@@ -0,0 +1,255 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.util.List;
+
+import org.opensearch.test.framework.RolesMapping;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClientConfiguration;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+/**
+* Class defines common tests for proxy and extended-proxy authentication. Subclasses are used to run tests.
+*/
+abstract class CommonProxyAuthenticationTests {
+
+    protected static final String RESOURCE_AUTH_INFO = "/_opendistro/_security/authinfo";
+    protected static final TestSecurityConfig.User USER_ADMIN = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    protected static final String ATTRIBUTE_DEPARTMENT = "department";
+    protected static final String ATTRIBUTE_SKILLS = "skills";
+
+    protected static final String USER_ATTRIBUTE_DEPARTMENT_NAME = "attr.proxy." + ATTRIBUTE_DEPARTMENT;
+    protected static final String USER_ATTRIBUTE_SKILLS_NAME = "attr.proxy." + ATTRIBUTE_SKILLS;
+    protected static final String USER_ATTRIBUTE_USERNAME_NAME = "attr.proxy.username";
+
+    protected static final String HEADER_PREFIX_CUSTOM_ATTRIBUTES = "x-custom-attr";
+    protected static final String HEADER_PROXY_USER = "x-proxy-user";
+    protected static final String HEADER_PROXY_ROLES = "x-proxy-roles";
+    protected static final String HEADER_FORWARDED_FOR = "X-Forwarded-For";
+    protected static final String HEADER_DEPARTMENT = HEADER_PREFIX_CUSTOM_ATTRIBUTES + ATTRIBUTE_DEPARTMENT;
+    protected static final String HEADER_SKILLS = HEADER_PREFIX_CUSTOM_ATTRIBUTES + ATTRIBUTE_SKILLS;
+
+    protected static final String IP_PROXY = "127.0.0.10";
+    protected static final String IP_NON_PROXY = "127.0.0.5";
+    protected static final String IP_CLIENT = "127.0.0.1";
+
+    protected static final String USER_KIRK = "kirk";
+    protected static final String USER_SPOCK = "spock";
+
+    protected static final String BACKEND_ROLE_FIRST_MATE = "firstMate";
+    protected static final String BACKEND_ROLE_CAPTAIN = "captain";
+    protected static final String DEPARTMENT_BRIDGE = "bridge";
+
+    protected static final String PERSONAL_INDEX_NAME_PATTERN = "personal-${"
+        + USER_ATTRIBUTE_DEPARTMENT_NAME
+        + "}-${"
+        + USER_ATTRIBUTE_USERNAME_NAME
+        + "}";
+    protected static final String PERSONAL_INDEX_NAME_SPOCK = "personal-" + DEPARTMENT_BRIDGE + "-" + USER_SPOCK;
+    protected static final String PERSONAL_INDEX_NAME_KIRK = "personal-" + DEPARTMENT_BRIDGE + "-" + USER_KIRK;
+
+    protected static final String POINTER_USERNAME = "/user_name";
+    protected static final String POINTER_BACKEND_ROLES = "/backend_roles";
+    protected static final String POINTER_ROLES = "/roles";
+    protected static final String POINTER_CUSTOM_ATTRIBUTES = "/custom_attribute_names";
+    protected static final String POINTER_TOTAL_HITS = "/hits/total/value";
+    protected static final String POINTER_FIRST_DOCUMENT_ID = "/hits/hits/0/_id";
+    protected static final String POINTER_FIRST_DOCUMENT_INDEX = "/hits/hits/0/_index";
+    protected static final String POINTER_FIRST_DOCUMENT_SOURCE_TITLE = "/hits/hits/0/_source/title";
+
+    protected static final TestSecurityConfig.Role ROLE_ALL_INDEX_SEARCH = new TestSecurityConfig.Role("all-index-search").indexPermissions(
+        "indices:data/read/search"
+    ).on("*");
+
+    protected static final TestSecurityConfig.Role ROLE_PERSONAL_INDEX_SEARCH = new TestSecurityConfig.Role("personal-index-search")
+        .indexPermissions("indices:data/read/search")
+        .on(PERSONAL_INDEX_NAME_PATTERN);
+
+    protected static final RolesMapping ROLES_MAPPING_CAPTAIN = new RolesMapping(ROLE_PERSONAL_INDEX_SEARCH).backendRoles(
+        BACKEND_ROLE_CAPTAIN
+    );
+
+    protected static final RolesMapping ROLES_MAPPING_FIRST_MATE = new RolesMapping(ROLE_ALL_INDEX_SEARCH).backendRoles(
+        BACKEND_ROLE_FIRST_MATE
+    );
+
+    protected abstract LocalCluster getCluster();
+
+    protected void shouldAuthenticateWithBasicAuthWhenProxyAuthenticationIsConfigured() {
+        try (TestRestClient client = getCluster().getRestClient(USER_ADMIN)) {
+            TestRestClient.HttpResponse response = client.get(RESOURCE_AUTH_INFO);
+
+            response.assertStatusCode(200);
+        }
+    }
+
+    protected void shouldAuthenticateWithProxy_positiveUserKirk() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_KIRK).header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_KIRK));
+        }
+    }
+
+    protected void shouldAuthenticateWithProxy_positiveUserSpock() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_SPOCK).header(HEADER_PROXY_ROLES, BACKEND_ROLE_FIRST_MATE);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_SPOCK));
+        }
+    }
+
+    protected void shouldAuthenticateWithProxy_negativeWhenXffHeaderIsMissing() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_PROXY_USER, USER_KIRK).header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+    }
+
+    protected void shouldAuthenticateWithProxy_negativeWhenUserNameHeaderIsMissing() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+    }
+
+    protected void shouldAuthenticateWithProxyWhenRolesHeaderIsMissing() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_KIRK);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_KIRK));
+        }
+    }
+
+    protected void shouldAuthenticateWithProxy_negativeWhenRequestWasNotSendByProxy() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_NON_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_KIRK);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+    }
+
+    protected void shouldRetrieveEmptyListOfRoles() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_SPOCK);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(0));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(0));
+        }
+    }
+
+    protected void shouldRetrieveSingleRoleFirstMate() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_SPOCK).header(HEADER_PROXY_ROLES, BACKEND_ROLE_FIRST_MATE);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, contains(BACKEND_ROLE_FIRST_MATE));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(1));
+            assertThat(roles, contains(ROLE_ALL_INDEX_SEARCH.getName()));
+        }
+    }
+
+    protected void shouldRetrieveSingleRoleCaptain() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        ).header(HEADER_FORWARDED_FOR, IP_CLIENT).header(HEADER_PROXY_USER, USER_SPOCK).header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, contains(BACKEND_ROLE_CAPTAIN));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(1));
+            assertThat(roles, contains(ROLE_PERSONAL_INDEX_SEARCH.getName()));
+        }
+    }
+
+    protected void shouldRetrieveMultipleRoles() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN + "," + BACKEND_ROLE_FIRST_MATE);
+        try (TestRestClient client = getCluster().createGenericClientRestClient(testRestClientConfiguration)) {
+
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(2));
+            assertThat(backendRoles, containsInAnyOrder(BACKEND_ROLE_CAPTAIN, BACKEND_ROLE_FIRST_MATE));
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_ROLES);
+            assertThat(roles, hasSize(2));
+            assertThat(roles, containsInAnyOrder(ROLE_PERSONAL_INDEX_SEARCH.getName(), ROLE_ALL_INDEX_SEARCH.getName()));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/DirectoryInformationTrees.java b/src/integrationTest/java/org/opensearch/security/http/DirectoryInformationTrees.java
new file mode 100644
index 0000000000..3f9c220923
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/DirectoryInformationTrees.java
@@ -0,0 +1,123 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import org.opensearch.test.framework.ldap.LdifBuilder;
+import org.opensearch.test.framework.ldap.LdifData;
+
+class DirectoryInformationTrees {
+
+    public static final String DN_PEOPLE_TEST_ORG = "ou=people,o=test.org";
+    public static final String DN_OPEN_SEARCH_PEOPLE_TEST_ORG = "cn=Open Search,ou=people,o=test.org";
+    public static final String DN_CHRISTPHER_PEOPLE_TEST_ORG = "cn=Christpher,ou=people,o=test.org";
+    public static final String DN_KIRK_PEOPLE_TEST_ORG = "cn=Kirk,ou=people,o=test.org";
+    public static final String DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG = "cn=Captain Spock,ou=people,o=test.org";
+    public static final String DN_LEONARD_PEOPLE_TEST_ORG = "cn=Leonard,ou=people,o=test.org";
+    public static final String DN_JEAN_PEOPLE_TEST_ORG = "cn=Jean,ou=people,o=test.org";
+    public static final String DN_GROUPS_TEST_ORG = "ou=groups,o=test.org";
+    public static final String DN_BRIDGE_GROUPS_TEST_ORG = "cn=bridge,ou=groups,o=test.org";
+
+    public static final String USER_KIRK = "kirk";
+    public static final String PASSWORD_KIRK = "kirk-secret";
+    public static final String USER_SPOCK = "spock";
+    public static final String PASSWORD_SPOCK = "spocksecret";
+    public static final String USER_OPENS = "opens";
+    public static final String PASSWORD_OPEN_SEARCH = "open_search-secret";
+    public static final String USER_JEAN = "jean";
+    public static final String PASSWORD_JEAN = "jeansecret";
+    public static final String USER_LEONARD = "leonard";
+    public static final String PASSWORD_LEONARD = "Leonard-secret";
+    public static final String PASSWORD_CHRISTPHER = "christpher_secret";
+
+    public static final String CN_GROUP_ADMIN = "admin";
+    public static final String CN_GROUP_CREW = "crew";
+    public static final String CN_GROUP_BRIDGE = "bridge";
+
+    public static final String USER_SEARCH = "(uid={0})";
+    public static final String USERNAME_ATTRIBUTE = "uid";
+
+    static final LdifData LDIF_DATA = new LdifBuilder().root("o=test.org")
+        .dc("TEST")
+        .classes("top", "domain")
+        .newRecord(DN_PEOPLE_TEST_ORG)
+        .ou("people")
+        .classes("organizationalUnit", "top")
+        .newRecord(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Open Search")
+        .sn("Search")
+        .uid(USER_OPENS)
+        .userPassword(PASSWORD_OPEN_SEARCH)
+        .mail("open.search@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Captain Spock")
+        .sn(USER_SPOCK)
+        .uid(USER_SPOCK)
+        .userPassword(PASSWORD_SPOCK)
+        .mail("spock@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_KIRK_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Kirk")
+        .sn("Kirk")
+        .uid(USER_KIRK)
+        .userPassword(PASSWORD_KIRK)
+        .mail("spock@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_CHRISTPHER_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Christpher")
+        .sn("Christpher")
+        .uid("christpher")
+        .userPassword(PASSWORD_CHRISTPHER)
+        .mail("christpher@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_LEONARD_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Leonard")
+        .sn("Leonard")
+        .uid(USER_LEONARD)
+        .userPassword(PASSWORD_LEONARD)
+        .mail("leonard@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_JEAN_PEOPLE_TEST_ORG)
+        .classes("inetOrgPerson")
+        .cn("Jean")
+        .sn("Jean")
+        .uid(USER_JEAN)
+        .userPassword(PASSWORD_JEAN)
+        .mail("jean@example.com")
+        .ou("Human Resources")
+        .newRecord(DN_GROUPS_TEST_ORG)
+        .ou("groups")
+        .cn("groupsRoot")
+        .classes("groupofuniquenames", "top")
+        .newRecord("cn=admin,ou=groups,o=test.org")
+        .ou("groups")
+        .cn(CN_GROUP_ADMIN)
+        .uniqueMember(DN_KIRK_PEOPLE_TEST_ORG)
+        .classes("groupofuniquenames", "top")
+        .newRecord("cn=crew,ou=groups,o=test.org")
+        .ou("groups")
+        .cn(CN_GROUP_CREW)
+        .uniqueMember(DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG)
+        .uniqueMember(DN_CHRISTPHER_PEOPLE_TEST_ORG)
+        .uniqueMember(DN_BRIDGE_GROUPS_TEST_ORG)
+        .classes("groupofuniquenames", "top")
+        .newRecord(DN_BRIDGE_GROUPS_TEST_ORG)
+        .ou("groups")
+        .cn(CN_GROUP_BRIDGE)
+        .uniqueMember(DN_JEAN_PEOPLE_TEST_ORG)
+        .classes("groupofuniquenames", "top")
+        .buildRecord()
+        .buildLdif();
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/DisabledBasicAuthTests.java b/src/integrationTest/java/org/opensearch/security/http/DisabledBasicAuthTests.java
new file mode 100644
index 0000000000..1ae3322a1e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/DisabledBasicAuthTests.java
@@ -0,0 +1,48 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+
+import static org.apache.http.HttpStatus.SC_UNAUTHORIZED;
+import static org.opensearch.security.http.BasicAuthTests.TEST_USER;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.DISABLED_AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.JWT_AUTH_DOMAIN;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class DisabledBasicAuthTests {
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(DISABLED_AUTHC_HTTPBASIC_INTERNAL)
+        .users(TEST_USER)
+        .authc(JWT_AUTH_DOMAIN)
+        .build();
+
+    @Test
+    public void shouldRespondWith401WhenCredentialsAreCorrectButBasicAuthIsDisabled() {
+        try (TestRestClient client = cluster.getRestClient(TEST_USER)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(SC_UNAUTHORIZED);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/ExtendedProxyAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/ExtendedProxyAuthenticationTest.java
new file mode 100644
index 0000000000..6fcc7eac83
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/ExtendedProxyAuthenticationTest.java
@@ -0,0 +1,260 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.client.Client;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.XffConfig;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+import org.opensearch.test.framework.cluster.TestRestClientConfiguration;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_MAGNUM_OPUS;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+/**
+* Class used to run tests defined in supper class and adds tests specific for <code>extended-proxy</code> authentication.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class ExtendedProxyAuthenticationTest extends CommonProxyAuthenticationTests {
+
+    public static final String ID_ONE_1 = "one#1";
+    public static final String ID_TWO_2 = "two#2";
+    public static final Map<String, Object> PROXY_AUTHENTICATOR_CONFIG = Map.of(
+        "user_header",
+        HEADER_PROXY_USER,
+        "roles_header",
+        HEADER_PROXY_ROLES,
+        "attr_header_prefix",
+        HEADER_PREFIX_CUSTOM_ATTRIBUTES
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .xff(new XffConfig(true).internalProxiesRegexp("127\\.0\\.0\\.10"))
+        .authc(
+            new AuthcDomain("proxy_auth_domain", -5, true).httpAuthenticator(
+                new HttpAuthenticator("extended-proxy").challenge(false).config(PROXY_AUTHENTICATOR_CONFIG)
+            ).backend(new AuthenticationBackend("noop"))
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_ADMIN)
+        .roles(ROLE_ALL_INDEX_SEARCH, ROLE_PERSONAL_INDEX_SEARCH)
+        .rolesMapping(ROLES_MAPPING_CAPTAIN, ROLES_MAPPING_FIRST_MATE)
+        .build();
+
+    @Override
+    protected LocalCluster getCluster() {
+        return cluster;
+    }
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(PERSONAL_INDEX_NAME_SPOCK).setId(ID_ONE_1).setRefreshPolicy(IMMEDIATE).setSource(SONGS[0].asMap()).get();
+            client.prepareIndex(PERSONAL_INDEX_NAME_KIRK).setId(ID_TWO_2).setRefreshPolicy(IMMEDIATE).setSource(SONGS[1].asMap()).get();
+        }
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithBasicAuthWhenProxyAuthenticationIsConfigured() {
+        super.shouldAuthenticateWithBasicAuthWhenProxyAuthenticationIsConfigured();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_positiveUserKirk() throws IOException {
+        super.shouldAuthenticateWithProxy_positiveUserKirk();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_positiveUserSpock() throws IOException {
+        super.shouldAuthenticateWithProxy_positiveUserSpock();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenXffHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenXffHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenUserNameHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenUserNameHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxyWhenRolesHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxyWhenRolesHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenRequestWasNotSendByProxy() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenRequestWasNotSendByProxy();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveEmptyListOfRoles() throws IOException {
+        super.shouldRetrieveEmptyListOfRoles();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveSingleRoleFirstMate() throws IOException {
+        super.shouldRetrieveSingleRoleFirstMate();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveSingleRoleCaptain() throws IOException {
+        super.shouldRetrieveSingleRoleCaptain();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveMultipleRoles() throws IOException {
+        super.shouldRetrieveMultipleRoles();
+    }
+
+    // tests specific for extended proxy authentication
+
+    @Test
+    public void shouldRetrieveCustomAttributeNameDepartment() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN)
+            .header(HEADER_DEPARTMENT, DEPARTMENT_BRIDGE);
+        try (TestRestClient client = cluster.createGenericClientRestClient(testRestClientConfiguration)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> customAttributes = response.getTextArrayFromJsonBody(POINTER_CUSTOM_ATTRIBUTES);
+            assertThat(customAttributes, hasSize(2));
+            assertThat(customAttributes, containsInAnyOrder(USER_ATTRIBUTE_USERNAME_NAME, USER_ATTRIBUTE_DEPARTMENT_NAME));
+        }
+    }
+
+    @Test
+    public void shouldRetrieveCustomAttributeNameSkills() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN)
+            .header(HEADER_SKILLS, "bilocation");
+        try (TestRestClient client = cluster.createGenericClientRestClient(testRestClientConfiguration)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> customAttributes = response.getTextArrayFromJsonBody(POINTER_CUSTOM_ATTRIBUTES);
+            assertThat(customAttributes, hasSize(2));
+            assertThat(customAttributes, containsInAnyOrder(USER_ATTRIBUTE_USERNAME_NAME, USER_ATTRIBUTE_SKILLS_NAME));
+        }
+    }
+
+    @Test
+    public void shouldRetrieveMultipleCustomAttributes() throws IOException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN)
+            .header(HEADER_DEPARTMENT, DEPARTMENT_BRIDGE)
+            .header(HEADER_SKILLS, "bilocation");
+        try (TestRestClient client = cluster.createGenericClientRestClient(testRestClientConfiguration)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> customAttributes = response.getTextArrayFromJsonBody(POINTER_CUSTOM_ATTRIBUTES);
+            assertThat(customAttributes, hasSize(3));
+            assertThat(
+                customAttributes,
+                containsInAnyOrder(USER_ATTRIBUTE_DEPARTMENT_NAME, USER_ATTRIBUTE_USERNAME_NAME, USER_ATTRIBUTE_SKILLS_NAME)
+            );
+        }
+    }
+
+    @Test
+    public void shouldRetrieveUserRolesAndAttributesSoThatAccessToPersonalIndexIsPossible_positive() throws UnknownHostException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN)
+            .header(HEADER_DEPARTMENT, DEPARTMENT_BRIDGE);
+        try (TestRestClient client = cluster.createGenericClientRestClient(testRestClientConfiguration)) {
+
+            HttpResponse response = client.get("/" + PERSONAL_INDEX_NAME_SPOCK + "/_search");
+
+            response.assertStatusCode(200);
+            assertThat(response.getLongFromJsonBody(POINTER_TOTAL_HITS), equalTo(1L));
+            assertThat(response.getTextFromJsonBody(POINTER_FIRST_DOCUMENT_ID), equalTo(ID_ONE_1));
+            assertThat(response.getTextFromJsonBody(POINTER_FIRST_DOCUMENT_INDEX), equalTo(PERSONAL_INDEX_NAME_SPOCK));
+            assertThat(response.getTextFromJsonBody(POINTER_FIRST_DOCUMENT_SOURCE_TITLE), equalTo(TITLE_MAGNUM_OPUS));
+        }
+    }
+
+    @Test
+    public void shouldRetrieveUserRolesAndAttributesSoThatAccessToPersonalIndexIsPossible_negative() throws UnknownHostException {
+        TestRestClientConfiguration testRestClientConfiguration = new TestRestClientConfiguration().sourceInetAddress(
+            InetAddress.getByName(IP_PROXY)
+        )
+            .header(HEADER_FORWARDED_FOR, IP_CLIENT)
+            .header(HEADER_PROXY_USER, USER_SPOCK)
+            .header(HEADER_PROXY_ROLES, BACKEND_ROLE_CAPTAIN)
+            .header(HEADER_DEPARTMENT, DEPARTMENT_BRIDGE);
+        try (TestRestClient client = cluster.createGenericClientRestClient(testRestClientConfiguration)) {
+
+            HttpResponse response = client.get("/" + PERSONAL_INDEX_NAME_KIRK + "/_search");
+
+            response.assertStatusCode(403);
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/JwtAuthenticationTests.java b/src/integrationTest/java/org/opensearch/security/http/JwtAuthenticationTests.java
new file mode 100644
index 0000000000..dd7690ec34
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/JwtAuthenticationTests.java
@@ -0,0 +1,270 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.io.IOException;
+import java.security.KeyPair;
+import java.util.Base64;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import io.jsonwebtoken.SignatureAlgorithm;
+import io.jsonwebtoken.security.Keys;
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.test.framework.JwtConfigBuilder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static java.nio.charset.StandardCharsets.US_ASCII;
+import static org.apache.http.HttpHeaders.AUTHORIZATION;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.security.Song.FIELD_TITLE;
+import static org.opensearch.security.Song.QUERY_TITLE_MAGNUM_OPUS;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.Song.TITLE_MAGNUM_OPUS;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.BASIC_AUTH_DOMAIN_ORDER;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryStringQueryRequest;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitContainsFieldWithValue;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class JwtAuthenticationTests {
+
+    public static final String CLAIM_USERNAME = "preferred-username";
+    public static final String CLAIM_ROLES = "backend-user-roles";
+
+    public static final String USER_SUPERHERO = "superhero";
+    public static final String USERNAME_ROOT = "root";
+    public static final String ROLE_ADMIN = "role_admin";
+    public static final String ROLE_DEVELOPER = "role_developer";
+    public static final String ROLE_QA = "role_qa";
+    public static final String ROLE_CTO = "role_cto";
+    public static final String ROLE_CEO = "role_ceo";
+    public static final String ROLE_VP = "role_vp";
+    public static final String POINTER_BACKEND_ROLES = "/backend_roles";
+    public static final String POINTER_USERNAME = "/user_name";
+
+    public static final String QA_DEPARTMENT = "qa-department";
+
+    public static final String CLAIM_DEPARTMENT = "department";
+
+    public static final String DEPARTMENT_SONG_INDEX_PATTERN = String.format("song_lyrics_${attr.jwt.%s}", CLAIM_DEPARTMENT);
+
+    public static final String QA_SONG_INDEX_NAME = String.format("song_lyrics_%s", QA_DEPARTMENT);
+
+    private static final KeyPair KEY_PAIR = Keys.keyPairFor(SignatureAlgorithm.RS256);
+    private static final String PUBLIC_KEY = new String(Base64.getEncoder().encode(KEY_PAIR.getPublic().getEncoded()), US_ASCII);
+
+    static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    private static final String JWT_AUTH_HEADER = "jwt-auth";
+
+    private static final JwtAuthorizationHeaderFactory tokenFactory = new JwtAuthorizationHeaderFactory(
+        KEY_PAIR.getPrivate(),
+        CLAIM_USERNAME,
+        CLAIM_ROLES,
+        JWT_AUTH_HEADER
+    );
+
+    public static final TestSecurityConfig.AuthcDomain JWT_AUTH_DOMAIN = new TestSecurityConfig.AuthcDomain(
+        "jwt",
+        BASIC_AUTH_DOMAIN_ORDER - 1
+    ).jwtHttpAuthenticator(
+        new JwtConfigBuilder().jwtHeader(JWT_AUTH_HEADER).signingKey(PUBLIC_KEY).subjectKey(CLAIM_USERNAME).rolesKey(CLAIM_ROLES)
+    ).backend("noop");
+    public static final String SONG_ID_1 = "song-id-01";
+
+    public static final Role DEPARTMENT_SONG_LISTENER_ROLE = new Role("department-song-listener-role").indexPermissions(
+        "indices:data/read/search"
+    ).on(DEPARTMENT_SONG_INDEX_PATTERN);
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .nodeSettings(
+            Map.of("plugins.security.restapi.roles_enabled", List.of("user_" + ADMIN_USER.getName() + "__" + ALL_ACCESS.getName()))
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER)
+        .roles(DEPARTMENT_SONG_LISTENER_ROLE)
+        .authc(JWT_AUTH_DOMAIN)
+        .build();
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("com.amazon.dlic.auth.http.jwt.HTTPJwtAuthenticator");
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(QA_SONG_INDEX_NAME).setId(SONG_ID_1).setRefreshPolicy(IMMEDIATE).setSource(SONGS[0].asMap()).get();
+        }
+        try (TestRestClient client = cluster.getRestClient(ADMIN_USER)) {
+            client.createRoleMapping(ROLE_VP, DEPARTMENT_SONG_LISTENER_ROLE.getName());
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_positive() {
+        try (TestRestClient client = cluster.getRestClient(tokenFactory.generateValidToken(USER_SUPERHERO))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_SUPERHERO));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_positiveWithAnotherUsername() {
+        try (TestRestClient client = cluster.getRestClient(tokenFactory.generateValidToken(USERNAME_ROOT))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USERNAME_ROOT));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_failureLackingUserName() {
+        try (TestRestClient client = cluster.getRestClient(tokenFactory.generateTokenWithoutPreferredUsername(USER_SUPERHERO))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatContainExactly("No subject found in JWT token");
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_failureExpiredToken() {
+        try (TestRestClient client = cluster.getRestClient(tokenFactory.generateExpiredToken(USER_SUPERHERO))) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatContainExactly("Invalid or expired JWT token.");
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_failureIncorrectFormatOfToken() {
+        Header header = new BasicHeader(AUTHORIZATION, "not.a.token");
+        try (TestRestClient client = cluster.getRestClient(header)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatContainExactly(String.format("No JWT token found in '%s' header header", JWT_AUTH_HEADER));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateWithJwtToken_failureIncorrectSignature() {
+        KeyPair incorrectKeyPair = Keys.keyPairFor(SignatureAlgorithm.RS256);
+        Header header = tokenFactory.generateTokenSignedWithKey(incorrectKeyPair.getPrivate(), USER_SUPERHERO);
+        try (TestRestClient client = cluster.getRestClient(header)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatContainExactly("Invalid or expired JWT token.");
+        }
+    }
+
+    @Test
+    public void shouldReadRolesFromToken_positiveFirstRoleSet() {
+        Header header = tokenFactory.generateValidToken(USER_SUPERHERO, ROLE_ADMIN, ROLE_DEVELOPER, ROLE_QA);
+        try (TestRestClient client = cluster.getRestClient(header)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(roles, hasSize(3));
+            assertThat(roles, containsInAnyOrder(ROLE_ADMIN, ROLE_DEVELOPER, ROLE_QA));
+        }
+    }
+
+    @Test
+    public void shouldReadRolesFromToken_positiveSecondRoleSet() {
+        Header header = tokenFactory.generateValidToken(USER_SUPERHERO, ROLE_CTO, ROLE_CEO, ROLE_VP);
+        try (TestRestClient client = cluster.getRestClient(header)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> roles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(roles, hasSize(3));
+            assertThat(roles, containsInAnyOrder(ROLE_CTO, ROLE_CEO, ROLE_VP));
+        }
+    }
+
+    @Test
+    public void shouldExposeTokenClaimsAsUserAttributes_positive() throws IOException {
+        String[] roles = { ROLE_VP };
+        Map<String, Object> additionalClaims = Map.of(CLAIM_DEPARTMENT, QA_DEPARTMENT);
+        Header header = tokenFactory.generateValidTokenWithCustomClaims(USER_SUPERHERO, roles, additionalClaims);
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(List.of(header))) {
+            SearchRequest searchRequest = queryStringQueryRequest(QA_SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS);
+
+            SearchResponse response = client.search(searchRequest, DEFAULT);
+
+            assertThat(response, isSuccessfulSearchResponse());
+            assertThat(response, numberOfTotalHitsIsEqualTo(1));
+            assertThat(response, searchHitsContainDocumentWithId(0, QA_SONG_INDEX_NAME, SONG_ID_1));
+            assertThat(response, searchHitContainsFieldWithValue(0, FIELD_TITLE, TITLE_MAGNUM_OPUS));
+        }
+    }
+
+    @Test
+    public void shouldExposeTokenClaimsAsUserAttributes_negative() throws IOException {
+        String[] roles = { ROLE_VP };
+        Map<String, Object> additionalClaims = Map.of(CLAIM_DEPARTMENT, "department-without-access-to-qa-song-index");
+        Header header = tokenFactory.generateValidTokenWithCustomClaims(USER_SUPERHERO, roles, additionalClaims);
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(List.of(header))) {
+            SearchRequest searchRequest = queryStringQueryRequest(QA_SONG_INDEX_NAME, QUERY_TITLE_MAGNUM_OPUS);
+
+            assertThatThrownBy(() -> client.search(searchRequest, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/JwtAuthorizationHeaderFactory.java b/src/integrationTest/java/org/opensearch/security/http/JwtAuthorizationHeaderFactory.java
new file mode 100644
index 0000000000..dd3a165d92
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/JwtAuthorizationHeaderFactory.java
@@ -0,0 +1,141 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.security.PrivateKey;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import com.google.common.collect.ImmutableMap;
+import io.jsonwebtoken.Jwts;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
+
+import static io.jsonwebtoken.SignatureAlgorithm.RS256;
+import static java.util.Objects.requireNonNull;
+
+class JwtAuthorizationHeaderFactory {
+    public static final String AUDIENCE = "OpenSearch";
+    public static final String ISSUER = "test-code";
+    private final PrivateKey privateKey;
+
+    private final String usernameClaimName;
+
+    private final String rolesClaimName;
+
+    private final String headerName;
+
+    public JwtAuthorizationHeaderFactory(PrivateKey privateKey, String usernameClaimName, String rolesClaimName, String headerName) {
+        this.privateKey = requireNonNull(privateKey, "Private key is required");
+        this.usernameClaimName = requireNonNull(usernameClaimName, "Username claim name is required");
+        this.rolesClaimName = requireNonNull(rolesClaimName, "Roles claim name is required.");
+        this.headerName = requireNonNull(headerName, "Header name is required");
+    }
+
+    Header generateValidToken(String username, String... roles) {
+        requireNonNull(username, "Username is required");
+        Date now = new Date();
+        String token = Jwts.builder()
+            .setClaims(customClaimsMap(username, roles))
+            .setIssuer(ISSUER)
+            .setSubject(subject(username))
+            .setAudience(AUDIENCE)
+            .setIssuedAt(now)
+            .setExpiration(new Date(now.getTime() + 3600 * 1000))
+            .signWith(privateKey, RS256)
+            .compact();
+        return toHeader(token);
+    }
+
+    private Map<String, Object> customClaimsMap(String username, String[] roles) {
+        ImmutableMap.Builder<String, Object> builder = new ImmutableMap.Builder();
+        if (StringUtils.isNoneEmpty(username)) {
+            builder.put(usernameClaimName, username);
+        }
+        if ((roles != null) && (roles.length > 0)) {
+            builder.put(rolesClaimName, Arrays.stream(roles).collect(Collectors.joining(",")));
+        }
+        return builder.build();
+    }
+
+    Header generateValidTokenWithCustomClaims(String username, String[] roles, Map<String, Object> additionalClaims) {
+        requireNonNull(username, "Username is required");
+        requireNonNull(additionalClaims, "Custom claims are required");
+        Map<String, Object> claims = new HashMap<>(customClaimsMap(username, roles));
+        claims.putAll(additionalClaims);
+        Date now = new Date();
+        String token = Jwts.builder()
+            .setClaims(claims)
+            .setIssuer(ISSUER)
+            .setSubject(subject(username))
+            .setAudience(AUDIENCE)
+            .setIssuedAt(now)
+            .setExpiration(new Date(now.getTime() + 3600 * 1000))
+            .signWith(privateKey, RS256)
+            .compact();
+        return toHeader(token);
+    }
+
+    private BasicHeader toHeader(String token) {
+        return new BasicHeader(headerName, token);
+    }
+
+    Header generateTokenWithoutPreferredUsername(String username) {
+        requireNonNull(username, "Username is required");
+        Date now = new Date();
+        String token = Jwts.builder()
+            .setIssuer(ISSUER)
+            .setSubject(username)
+            .setIssuedAt(now)
+            .setExpiration(new Date(now.getTime() + 3600 * 1000))
+            .signWith(privateKey, RS256)
+            .compact();
+        return toHeader(token);
+    }
+
+    public Header generateExpiredToken(String username) {
+        requireNonNull(username, "Username is required");
+        Date now = new Date(1000);
+        String token = Jwts.builder()
+            .setClaims(Map.of(usernameClaimName, username))
+            .setIssuer(ISSUER)
+            .setSubject(subject(username))
+            .setAudience(AUDIENCE)
+            .setIssuedAt(now)
+            .setExpiration(new Date(now.getTime() + 3600 * 1000))
+            .signWith(privateKey, RS256)
+            .compact();
+        return toHeader(token);
+    }
+
+    public Header generateTokenSignedWithKey(PrivateKey key, String username) {
+        requireNonNull(key, "Private key is required");
+        requireNonNull(username, "Username is required");
+        Date now = new Date();
+        String token = Jwts.builder()
+            .setClaims(Map.of(usernameClaimName, username))
+            .setIssuer(ISSUER)
+            .setSubject(subject(username))
+            .setAudience(AUDIENCE)
+            .setIssuedAt(now)
+            .setExpiration(new Date(now.getTime() + 3600 * 1000))
+            .signWith(key, RS256)
+            .compact();
+        return toHeader(token);
+    }
+
+    private static String subject(String username) {
+        return "subject-" + username;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/LdapAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/LdapAuthenticationTest.java
new file mode 100644
index 0000000000..299b2cc7d2
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/LdapAuthenticationTest.java
@@ -0,0 +1,120 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.RuleChain;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.LdapAuthenticationConfigBuilder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.ldap.EmbeddedLDAPServer;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_OPEN_SEARCH_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.LDIF_DATA;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_OPEN_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_SPOCK;
+import static org.opensearch.security.http.DirectoryInformationTrees.USERNAME_ATTRIBUTE;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SPOCK;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.BASIC_AUTH_DOMAIN_ORDER;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+/**
+* Test uses plain (non TLS) connection between OpenSearch and LDAP server.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class LdapAuthenticationTest {
+
+    private static final Logger log = LogManager.getLogger(LdapAuthenticationTest.class);
+
+    private static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    private static final TestCertificates TEST_CERTIFICATES = new TestCertificates();
+
+    public static final EmbeddedLDAPServer embeddedLDAPServer = new EmbeddedLDAPServer(
+        TEST_CERTIFICATES.getRootCertificateData(),
+        TEST_CERTIFICATES.getLdapCertificateData(),
+        LDIF_DATA
+    );
+
+    public static LocalCluster cluster = new LocalCluster.Builder().testCertificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(
+            new AuthcDomain("ldap", BASIC_AUTH_DOMAIN_ORDER + 1, true).httpAuthenticator(new HttpAuthenticator("basic").challenge(false))
+                .backend(
+                    new AuthenticationBackend("ldap").config(
+                        () -> LdapAuthenticationConfigBuilder.config()
+                            // this port is available when embeddedLDAPServer is already started, therefore Supplier interface is used to
+                            // postpone
+                            // execution of the code in this block.
+                            .enableSsl(false)
+                            .enableStartTls(false)
+                            .hosts(List.of("localhost:" + embeddedLDAPServer.getLdapNonTlsPort()))
+                            .bindDn(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+                            .password(PASSWORD_OPEN_SEARCH)
+                            .userBase(DN_PEOPLE_TEST_ORG)
+                            .userSearch(USER_SEARCH)
+                            .usernameAttribute(USERNAME_ATTRIBUTE)
+                            .build()
+                    )
+                )
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER)
+        .build();
+
+    @ClassRule
+    public static RuleChain ruleChain = RuleChain.outerRule(embeddedLDAPServer).around(cluster);
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("com.amazon.dlic.auth.ldap.backend.LDAPAuthenticationBackend");
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_positive() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, PASSWORD_SPOCK)) {
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_negativeWhenIncorrectPassword() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, "incorrect password")) {
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            String expectedStackTraceFragment = "Unable to bind as user '".concat(DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG)
+                .concat("' because the provided password was incorrect.");
+            logsRule.assertThatStackTraceContain(expectedStackTraceFragment);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/LdapStartTlsAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/LdapStartTlsAuthenticationTest.java
new file mode 100644
index 0000000000..395467897d
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/LdapStartTlsAuthenticationTest.java
@@ -0,0 +1,118 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.RuleChain;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.LdapAuthenticationConfigBuilder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.ldap.EmbeddedLDAPServer;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_OPEN_SEARCH_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.LDIF_DATA;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_OPEN_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_SPOCK;
+import static org.opensearch.security.http.DirectoryInformationTrees.USERNAME_ATTRIBUTE;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SPOCK;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.BASIC_AUTH_DOMAIN_ORDER;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+/**
+* Test initiates plain (non-TLS) connection between OpenSearch and LDAP server and then in the course of the test connection is upgraded
+* to TLS.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class LdapStartTlsAuthenticationTest {
+
+    private static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    private static final TestCertificates TEST_CERTIFICATES = new TestCertificates();
+
+    public static final EmbeddedLDAPServer embeddedLDAPServer = new EmbeddedLDAPServer(
+        TEST_CERTIFICATES.getRootCertificateData(),
+        TEST_CERTIFICATES.getLdapCertificateData(),
+        LDIF_DATA
+    );
+
+    public static LocalCluster cluster = new LocalCluster.Builder().testCertificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(
+            new AuthcDomain("ldap-config-id", BASIC_AUTH_DOMAIN_ORDER + 1, true).httpAuthenticator(
+                new HttpAuthenticator("basic").challenge(false)
+            )
+                .backend(
+                    new AuthenticationBackend("ldap").config(
+                        () -> LdapAuthenticationConfigBuilder.config()
+                            // this port is available when embeddedLDAPServer is already started, therefore Supplier interface is used
+                            .hosts(List.of("localhost:" + embeddedLDAPServer.getLdapNonTlsPort()))
+                            .enableSsl(false)
+                            .enableStartTls(true)
+                            .bindDn(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+                            .password(PASSWORD_OPEN_SEARCH)
+                            .userBase(DN_PEOPLE_TEST_ORG)
+                            .userSearch(USER_SEARCH)
+                            .usernameAttribute(USERNAME_ATTRIBUTE)
+                            .penTrustedCasFilePath(TEST_CERTIFICATES.getRootCertificate().getAbsolutePath())
+                            .build()
+                    )
+                )
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER)
+        .build();
+
+    @ClassRule
+    public static RuleChain ruleChain = RuleChain.outerRule(embeddedLDAPServer).around(cluster);
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("com.amazon.dlic.auth.ldap.backend.LDAPAuthenticationBackend");
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_positive() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, PASSWORD_SPOCK)) {
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_negativeWhenIncorrectPassword() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, "incorrect password")) {
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            String expectedStackTraceFragment = "Unable to bind as user '".concat(DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG)
+                .concat("' because the provided password was incorrect.");
+            logsRule.assertThatStackTraceContain(expectedStackTraceFragment);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/LdapTlsAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/LdapTlsAuthenticationTest.java
new file mode 100644
index 0000000000..f00007e4fc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/LdapTlsAuthenticationTest.java
@@ -0,0 +1,414 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.message.BasicHeader;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.RuleChain;
+import org.junit.runner.RunWith;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.test.framework.AuthorizationBackend;
+import org.opensearch.test.framework.AuthzDomain;
+import org.opensearch.test.framework.LdapAuthenticationConfigBuilder;
+import org.opensearch.test.framework.LdapAuthorizationConfigBuilder;
+import org.opensearch.test.framework.RolesMapping;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.cluster.TestRestClient.HttpResponse;
+import org.opensearch.test.framework.ldap.EmbeddedLDAPServer;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.not;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+import static org.opensearch.client.RequestOptions.DEFAULT;
+import static org.opensearch.core.rest.RestStatus.FORBIDDEN;
+import static org.opensearch.security.Song.SONGS;
+import static org.opensearch.security.http.DirectoryInformationTrees.CN_GROUP_ADMIN;
+import static org.opensearch.security.http.DirectoryInformationTrees.CN_GROUP_BRIDGE;
+import static org.opensearch.security.http.DirectoryInformationTrees.CN_GROUP_CREW;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_GROUPS_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_OPEN_SEARCH_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.LDIF_DATA;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_JEAN;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_KIRK;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_LEONARD;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_OPEN_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_SPOCK;
+import static org.opensearch.security.http.DirectoryInformationTrees.USERNAME_ATTRIBUTE;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_JEAN;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_KIRK;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_LEONARD;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SPOCK;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.BASIC_AUTH_DOMAIN_ORDER;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+import static org.opensearch.test.framework.cluster.SearchRequestFactory.queryStringQueryRequest;
+import static org.opensearch.test.framework.matcher.ExceptionMatcherAssert.assertThatThrownBy;
+import static org.opensearch.test.framework.matcher.OpenSearchExceptionMatchers.statusException;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.isSuccessfulSearchResponse;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.numberOfTotalHitsIsEqualTo;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.searchHitsContainDocumentWithId;
+
+/**
+* Test uses plain TLS connection between OpenSearch and LDAP server.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class LdapTlsAuthenticationTest {
+
+    private static final String SONG_INDEX_NAME = "song_lyrics";
+
+    private static final String HEADER_NAME_IMPERSONATE = "opendistro_security_impersonate_as";
+
+    private static final String PERSONAL_INDEX_NAME_SPOCK = "personal-" + USER_SPOCK;
+    private static final String PERSONAL_INDEX_NAME_KIRK = "personal-" + USER_KIRK;
+
+    private static final String POINTER_BACKEND_ROLES = "/backend_roles";
+    private static final String POINTER_ROLES = "/roles";
+    private static final String POINTER_USERNAME = "/user_name";
+    private static final String POINTER_ERROR_REASON = "/error/reason";
+
+    private static final String SONG_ID_1 = "l0001";
+    private static final String SONG_ID_2 = "l0002";
+    private static final String SONG_ID_3 = "l0003";
+
+    private static final User ADMIN_USER = new User("admin").roles(ALL_ACCESS);
+
+    private static final TestCertificates TEST_CERTIFICATES = new TestCertificates();
+
+    private static final Role ROLE_INDEX_ADMINISTRATOR = new Role("index_administrator").indexPermissions("*").on("*");
+    private static final Role ROLE_PERSONAL_INDEX_ACCESS = new Role("personal_index_access").indexPermissions("*")
+        .on("personal-${attr.ldap.uid}");
+
+    private static final EmbeddedLDAPServer embeddedLDAPServer = new EmbeddedLDAPServer(
+        TEST_CERTIFICATES.getRootCertificateData(),
+        TEST_CERTIFICATES.getLdapCertificateData(),
+        LDIF_DATA
+    );
+
+    private static final Map<String, Object> USER_IMPERSONATION_CONFIGURATION = Map.of(
+        "plugins.security.authcz.rest_impersonation_user." + USER_KIRK,
+        List.of(USER_SPOCK)
+    );
+
+    private static final LocalCluster cluster = new LocalCluster.Builder().testCertificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .nodeSettings(USER_IMPERSONATION_CONFIGURATION)
+        .authc(
+            new AuthcDomain("ldap", BASIC_AUTH_DOMAIN_ORDER + 1, true).httpAuthenticator(new HttpAuthenticator("basic").challenge(false))
+                .backend(
+                    new AuthenticationBackend("ldap").config(
+                        () -> LdapAuthenticationConfigBuilder.config()
+                            // this port is available when embeddedLDAPServer is already started, therefore Supplier interface is used
+                            .hosts(List.of("localhost:" + embeddedLDAPServer.getLdapTlsPort()))
+                            .enableSsl(true)
+                            .bindDn(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+                            .password(PASSWORD_OPEN_SEARCH)
+                            .userBase(DN_PEOPLE_TEST_ORG)
+                            .userSearch(USER_SEARCH)
+                            .usernameAttribute(USERNAME_ATTRIBUTE)
+                            .penTrustedCasFilePath(TEST_CERTIFICATES.getRootCertificate().getAbsolutePath())
+                            .build()
+                    )
+                )
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER)
+        .roles(ROLE_INDEX_ADMINISTRATOR, ROLE_PERSONAL_INDEX_ACCESS)
+        .rolesMapping(
+            new RolesMapping(ROLE_INDEX_ADMINISTRATOR).backendRoles(CN_GROUP_ADMIN),
+            new RolesMapping(ROLE_PERSONAL_INDEX_ACCESS).backendRoles(CN_GROUP_CREW)
+        )
+        .authz(
+            new AuthzDomain("ldap_roles").httpEnabled(true)
+                .transportEnabled(true)
+                .authorizationBackend(
+                    new AuthorizationBackend("ldap").config(
+                        () -> new LdapAuthorizationConfigBuilder().hosts(List.of("localhost:" + embeddedLDAPServer.getLdapTlsPort()))
+                            .enableSsl(true)
+                            .bindDn(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+                            .password(PASSWORD_OPEN_SEARCH)
+                            .userBase(DN_PEOPLE_TEST_ORG)
+                            .userSearch(USER_SEARCH)
+                            .usernameAttribute(USERNAME_ATTRIBUTE)
+                            .penTrustedCasFilePath(TEST_CERTIFICATES.getRootCertificate().getAbsolutePath())
+                            .roleBase(DN_GROUPS_TEST_ORG)
+                            .roleSearch("(uniqueMember={0})")
+                            .userRoleAttribute(null)
+                            .userRoleName("disabled")
+                            .roleName("cn")
+                            .resolveNestedRoles(true)
+                            .build()
+                    )
+                )
+        )
+        .build();
+
+    @ClassRule
+    public static final RuleChain ruleChain = RuleChain.outerRule(embeddedLDAPServer).around(cluster);
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("com.amazon.dlic.auth.ldap.backend.LDAPAuthenticationBackend");
+
+    @BeforeClass
+    public static void createTestData() {
+        try (Client client = cluster.getInternalNodeClient()) {
+            client.prepareIndex(SONG_INDEX_NAME).setId(SONG_ID_1).setRefreshPolicy(IMMEDIATE).setSource(SONGS[0].asMap()).get();
+            client.prepareIndex(PERSONAL_INDEX_NAME_SPOCK).setId(SONG_ID_2).setRefreshPolicy(IMMEDIATE).setSource(SONGS[1].asMap()).get();
+            client.prepareIndex(PERSONAL_INDEX_NAME_KIRK).setId(SONG_ID_3).setRefreshPolicy(IMMEDIATE).setSource(SONGS[2].asMap()).get();
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_positiveSpockUser() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, PASSWORD_SPOCK)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_SPOCK));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_positiveKirkUser() {
+        try (TestRestClient client = cluster.getRestClient(USER_KIRK, PASSWORD_KIRK)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            String username = response.getTextFromJsonBody(POINTER_USERNAME);
+            assertThat(username, equalTo(USER_KIRK));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_negativeWhenIncorrectPassword() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, "incorrect password")) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            String expectedStackTraceFragment = "Unable to bind as user '".concat(DN_CAPTAIN_SPOCK_PEOPLE_TEST_ORG)
+                .concat("' because the provided password was incorrect.");
+            logsRule.assertThatStackTraceContain(expectedStackTraceFragment);
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_negativeWhenIncorrectUsername() {
+        final String username = "invalid-user-name";
+        try (TestRestClient client = cluster.getRestClient(username, PASSWORD_SPOCK)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatStackTraceContain(String.format("No user %s found", username));
+        }
+    }
+
+    @Test
+    public void shouldAuthenticateUserWithLdap_negativeWhenUserDoesNotExist() {
+        final String username = "doesNotExist";
+        try (TestRestClient client = cluster.getRestClient(username, "password")) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+            logsRule.assertThatStackTraceContain(String.format("No user %s found", username));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserRolesAgainstLdapBackend_positiveSpockUser() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, PASSWORD_SPOCK)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, contains(CN_GROUP_CREW));
+            assertThat(response.getTextArrayFromJsonBody(POINTER_ROLES), contains(ROLE_PERSONAL_INDEX_ACCESS.getName()));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserRolesAgainstLdapBackend_positiveKirkUser() {
+        try (TestRestClient client = cluster.getRestClient(USER_KIRK, PASSWORD_KIRK)) {
+
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            assertThat(response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES), contains(CN_GROUP_ADMIN));
+            assertThat(response.getTextArrayFromJsonBody(POINTER_ROLES), contains(ROLE_INDEX_ADMINISTRATOR.getName()));
+        }
+    }
+
+    @Test
+    public void shouldPerformAuthorizationAgainstLdapToAccessIndex_positive() throws IOException {
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_KIRK, PASSWORD_KIRK)) {
+            SearchRequest request = queryStringQueryRequest(SONG_INDEX_NAME, "*");
+
+            SearchResponse searchResponse = client.search(request, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, SONG_INDEX_NAME, SONG_ID_1));
+        }
+    }
+
+    @Test
+    public void shouldPerformAuthorizationAgainstLdapToAccessIndex_negative() throws IOException {
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_LEONARD, PASSWORD_LEONARD)) {
+            SearchRequest request = queryStringQueryRequest(SONG_INDEX_NAME, "*");
+
+            assertThatThrownBy(() -> client.search(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserAttributesLoadedFromLdap_positive() throws IOException {
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_SPOCK, PASSWORD_SPOCK)) {
+            SearchRequest request = queryStringQueryRequest(PERSONAL_INDEX_NAME_SPOCK, "*");
+
+            SearchResponse searchResponse = client.search(request, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, PERSONAL_INDEX_NAME_SPOCK, SONG_ID_2));
+        }
+    }
+
+    @Test
+    public void shouldResolveUserAttributesLoadedFromLdap_negative() throws IOException {
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_SPOCK, PASSWORD_SPOCK)) {
+            SearchRequest request = queryStringQueryRequest(PERSONAL_INDEX_NAME_KIRK, "*");
+
+            assertThatThrownBy(() -> client.search(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void shouldResolveNestedGroups_positive() {
+        try (TestRestClient client = cluster.getRestClient(USER_JEAN, PASSWORD_JEAN)) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(2));
+            // CN_GROUP_CREW is retrieved recursively: cn=Jean,ou=people,o=test.org -> cn=bridge,ou=groups,o=test.org ->
+            // cn=crew,ou=groups,o=test.org
+            assertThat(backendRoles, containsInAnyOrder(CN_GROUP_CREW, CN_GROUP_BRIDGE));
+            assertThat(response.getTextArrayFromJsonBody(POINTER_ROLES), contains(ROLE_PERSONAL_INDEX_ACCESS.getName()));
+        }
+    }
+
+    @Test
+    public void shouldResolveNestedGroups_negative() {
+        try (TestRestClient client = cluster.getRestClient(USER_KIRK, PASSWORD_KIRK)) {
+            HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(200);
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, not(containsInAnyOrder(CN_GROUP_CREW)));
+        }
+    }
+
+    @Test
+    public void shouldImpersonateUser_positive() {
+        try (TestRestClient client = cluster.getRestClient(USER_KIRK, PASSWORD_KIRK)) {
+
+            HttpResponse response = client.getAuthInfo(new BasicHeader(HEADER_NAME_IMPERSONATE, USER_SPOCK));
+
+            response.assertStatusCode(200);
+            assertThat(response.getTextFromJsonBody(POINTER_USERNAME), equalTo(USER_SPOCK));
+            List<String> backendRoles = response.getTextArrayFromJsonBody(POINTER_BACKEND_ROLES);
+            assertThat(backendRoles, hasSize(1));
+            assertThat(backendRoles, contains(CN_GROUP_CREW));
+        }
+    }
+
+    @Test
+    public void shouldImpersonateUser_negativeJean() {
+        try (TestRestClient client = cluster.getRestClient(USER_KIRK, PASSWORD_KIRK)) {
+
+            HttpResponse response = client.getAuthInfo(new BasicHeader(HEADER_NAME_IMPERSONATE, USER_JEAN));
+
+            response.assertStatusCode(403);
+            String expectedMessage = String.format("'%s' is not allowed to impersonate as '%s'", USER_KIRK, USER_JEAN);
+            assertThat(response.getTextFromJsonBody(POINTER_ERROR_REASON), equalTo(expectedMessage));
+        }
+    }
+
+    @Test
+    public void shouldImpersonateUser_negativeKirk() {
+        try (TestRestClient client = cluster.getRestClient(USER_JEAN, PASSWORD_JEAN)) {
+
+            HttpResponse response = client.getAuthInfo(new BasicHeader(HEADER_NAME_IMPERSONATE, USER_KIRK));
+
+            response.assertStatusCode(403);
+            String expectedMessage = String.format("'%s' is not allowed to impersonate as '%s'", USER_JEAN, USER_KIRK);
+            assertThat(response.getTextFromJsonBody(POINTER_ERROR_REASON), equalTo(expectedMessage));
+        }
+    }
+
+    @Test
+    public void shouldAccessImpersonatedUserPersonalIndex_positive() throws IOException {
+        BasicHeader impersonateHeader = new BasicHeader(HEADER_NAME_IMPERSONATE, USER_SPOCK);
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_KIRK, PASSWORD_KIRK, impersonateHeader)) {
+            SearchRequest request = queryStringQueryRequest(PERSONAL_INDEX_NAME_SPOCK, "*");
+
+            SearchResponse searchResponse = client.search(request, DEFAULT);
+
+            assertThat(searchResponse, isSuccessfulSearchResponse());
+            assertThat(searchResponse, numberOfTotalHitsIsEqualTo(1));
+            assertThat(searchResponse, searchHitsContainDocumentWithId(0, PERSONAL_INDEX_NAME_SPOCK, SONG_ID_2));
+        }
+    }
+
+    @Test
+    public void shouldAccessImpersonatedUserPersonalIndex_negative() throws IOException {
+        BasicHeader impersonateHeader = new BasicHeader(HEADER_NAME_IMPERSONATE, USER_SPOCK);
+        try (RestHighLevelClient client = cluster.getRestHighLevelClient(USER_KIRK, PASSWORD_KIRK, impersonateHeader)) {
+            SearchRequest request = queryStringQueryRequest(PERSONAL_INDEX_NAME_KIRK, "*");
+
+            assertThatThrownBy(() -> client.search(request, DEFAULT), statusException(FORBIDDEN));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/ProxyAuthenticationTest.java b/src/integrationTest/java/org/opensearch/security/http/ProxyAuthenticationTest.java
new file mode 100644
index 0000000000..8d9ede8e5a
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/ProxyAuthenticationTest.java
@@ -0,0 +1,128 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.io.IOException;
+import java.util.Map;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.XffConfig;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+/**
+* Class used to run tests defined in the supper class against OpenSearch cluster with configured <code>proxy</code> authentication.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class ProxyAuthenticationTest extends CommonProxyAuthenticationTests {
+
+    private static final Map<String, Object> PROXY_AUTHENTICATOR_CONFIG = Map.of(
+        "user_header",
+        HEADER_PROXY_USER,
+        "roles_header",
+        HEADER_PROXY_ROLES
+    );
+
+    @ClassRule
+    public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .xff(new XffConfig(true).internalProxiesRegexp("127\\.0\\.0\\.10"))
+        .authc(
+            new AuthcDomain("proxy_auth_domain", -5, true).httpAuthenticator(
+                new HttpAuthenticator("proxy").challenge(false).config(PROXY_AUTHENTICATOR_CONFIG)
+            ).backend(new AuthenticationBackend("noop"))
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(USER_ADMIN)
+        .roles(ROLE_ALL_INDEX_SEARCH, ROLE_PERSONAL_INDEX_SEARCH)
+        .rolesMapping(ROLES_MAPPING_CAPTAIN, ROLES_MAPPING_FIRST_MATE)
+        .build();
+
+    @Override
+    protected LocalCluster getCluster() {
+        return cluster;
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithBasicAuthWhenProxyAuthenticationIsConfigured() {
+        super.shouldAuthenticateWithBasicAuthWhenProxyAuthenticationIsConfigured();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_positiveUserKirk() throws IOException {
+        super.shouldAuthenticateWithProxy_positiveUserKirk();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_positiveUserSpock() throws IOException {
+        super.shouldAuthenticateWithProxy_positiveUserSpock();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenXffHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenXffHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenUserNameHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenUserNameHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxyWhenRolesHeaderIsMissing() throws IOException {
+        super.shouldAuthenticateWithProxyWhenRolesHeaderIsMissing();
+    }
+
+    @Test
+    @Override
+    public void shouldAuthenticateWithProxy_negativeWhenRequestWasNotSendByProxy() throws IOException {
+        super.shouldAuthenticateWithProxy_negativeWhenRequestWasNotSendByProxy();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveEmptyListOfRoles() throws IOException {
+        super.shouldRetrieveEmptyListOfRoles();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveSingleRoleFirstMate() throws IOException {
+        super.shouldRetrieveSingleRoleFirstMate();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveSingleRoleCaptain() throws IOException {
+        super.shouldRetrieveSingleRoleCaptain();
+    }
+
+    @Test
+    @Override
+    public void shouldRetrieveMultipleRoles() throws IOException {
+        super.shouldRetrieveMultipleRoles();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/http/UntrustedLdapServerCertificateTest.java b/src/integrationTest/java/org/opensearch/security/http/UntrustedLdapServerCertificateTest.java
new file mode 100644
index 0000000000..10e3f0853f
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/http/UntrustedLdapServerCertificateTest.java
@@ -0,0 +1,104 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.security.http;
+
+import java.util.List;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.RuleChain;
+import org.junit.runner.RunWith;
+
+import org.opensearch.test.framework.LdapAuthenticationConfigBuilder;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AuthenticationBackend;
+import org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.HttpAuthenticator;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.ldap.EmbeddedLDAPServer;
+import org.opensearch.test.framework.log.LogsRule;
+
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_OPEN_SEARCH_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.DN_PEOPLE_TEST_ORG;
+import static org.opensearch.security.http.DirectoryInformationTrees.LDIF_DATA;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_OPEN_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.PASSWORD_SPOCK;
+import static org.opensearch.security.http.DirectoryInformationTrees.USERNAME_ATTRIBUTE;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SEARCH;
+import static org.opensearch.security.http.DirectoryInformationTrees.USER_SPOCK;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.BASIC_AUTH_DOMAIN_ORDER;
+import static org.opensearch.test.framework.TestSecurityConfig.Role.ALL_ACCESS;
+
+/**
+* Negative test case related to LDAP server certificate. Connection between OpenSearch and LDAP server should not be established if
+* OpenSearch "does not trust" LDAP server certificate.
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class UntrustedLdapServerCertificateTest {
+
+    private static final TestSecurityConfig.User ADMIN_USER = new TestSecurityConfig.User("admin").roles(ALL_ACCESS);
+
+    private static final TestCertificates TEST_CERTIFICATES = new TestCertificates();
+
+    public static final EmbeddedLDAPServer embeddedLDAPServer = new EmbeddedLDAPServer(
+        TEST_CERTIFICATES.getRootCertificateData(),
+        TEST_CERTIFICATES.createSelfSignedCertificate("CN=untrusted"),
+        LDIF_DATA
+    );
+
+    public static LocalCluster cluster = new LocalCluster.Builder().testCertificates(TEST_CERTIFICATES)
+        .clusterManager(ClusterManager.SINGLENODE)
+        .anonymousAuth(false)
+        .authc(
+            new AuthcDomain("ldap", BASIC_AUTH_DOMAIN_ORDER + 1, true).httpAuthenticator(new HttpAuthenticator("basic").challenge(false))
+                .backend(
+                    new AuthenticationBackend("ldap").config(
+                        () -> LdapAuthenticationConfigBuilder.config()
+                            // this port is available when embeddedLDAPServer is already started, therefore Supplier interface is used
+                            .hosts(List.of("localhost:" + embeddedLDAPServer.getLdapTlsPort()))
+                            .enableSsl(true)
+                            .bindDn(DN_OPEN_SEARCH_PEOPLE_TEST_ORG)
+                            .password(PASSWORD_OPEN_SEARCH)
+                            .userBase(DN_PEOPLE_TEST_ORG)
+                            .userSearch(USER_SEARCH)
+                            .usernameAttribute(USERNAME_ATTRIBUTE)
+                            .penTrustedCasFilePath(TEST_CERTIFICATES.getRootCertificate().getAbsolutePath())
+                            .build()
+                    )
+                )
+        )
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(ADMIN_USER)
+        .build();
+
+    @ClassRule
+    public static RuleChain ruleChain = RuleChain.outerRule(embeddedLDAPServer).around(cluster);
+
+    @Rule
+    public LogsRule logsRule = new LogsRule("com.amazon.dlic.auth.ldap.backend.LDAPAuthenticationBackend");
+
+    @Test
+    public void shouldNotAuthenticateUserWithLdap() {
+        try (TestRestClient client = cluster.getRestClient(USER_SPOCK, PASSWORD_SPOCK)) {
+            TestRestClient.HttpResponse response = client.getAuthInfo();
+
+            response.assertStatusCode(401);
+        }
+        logsRule.assertThatStackTraceContain("javax.net.ssl.SSLHandshakeException");
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java
new file mode 100644
index 0000000000..302d2ad1d7
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java
@@ -0,0 +1,121 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.security.privileges;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpStatus;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.opensearch.script.mustache.MustachePlugin;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+
+/**
+* This is a port for the test
+* org.opensearch.security.privileges.PrivilegesEvaluatorTest to the new test
+* framework for direct comparison
+*/
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class PrivilegesEvaluatorTest {
+
+    protected final static TestSecurityConfig.User NEGATIVE_LOOKAHEAD = new TestSecurityConfig.User("negative_lookahead_user").roles(
+        new Role("negative_lookahead_role").indexPermissions("read").on("/^(?!t.*).*/").clusterPermissions("cluster_composite_ops")
+    );
+
+    protected final static TestSecurityConfig.User NEGATED_REGEX = new TestSecurityConfig.User("negated_regex_user").roles(
+        new Role("negated_regex_role").indexPermissions("read").on("/^[a-z].*/").clusterPermissions("cluster_composite_ops")
+    );
+
+    protected final static TestSecurityConfig.User SEARCH_TEMPLATE = new TestSecurityConfig.User("search_template_user").roles(
+        new Role("search_template_role").indexPermissions("read").on("services").clusterPermissions("cluster_composite_ops")
+    );
+
+    private String TEST_QUERY =
+        "{\"source\":{\"query\":{\"match\":{\"service\":\"{{service_name}}\"}}},\"params\":{\"service_name\":\"Oracle\"}}";
+
+    private String TEST_DOC = "{\"source\": {\"title\": \"Spirited Away\"}}";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(NEGATIVE_LOOKAHEAD, NEGATED_REGEX, SEARCH_TEMPLATE, TestSecurityConfig.User.USER_ADMIN)
+        .plugin(MustachePlugin.class)
+        .build();
+
+    @Test
+    public void testNegativeLookaheadPattern() throws Exception {
+
+        try (TestRestClient client = cluster.getRestClient(NEGATIVE_LOOKAHEAD)) {
+            assertThat(client.get("*/_search").getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+            assertThat(client.get("r*/_search").getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+    }
+
+    @Test
+    public void testRegexPattern() throws Exception {
+
+        try (TestRestClient client = cluster.getRestClient(NEGATED_REGEX)) {
+            assertThat(client.get("*/_search").getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+            assertThat(client.get("r*/_search").getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+
+    }
+
+    @Test
+    public void testSearchTemplateRequestSuccess() {
+        // Insert doc into services index with admin user
+        try (TestRestClient client = cluster.getRestClient(TestSecurityConfig.User.USER_ADMIN)) {
+            TestRestClient.HttpResponse response = client.postJson("services/_doc", TEST_DOC);
+            assertThat(response.getStatusCode(), equalTo(HttpStatus.SC_CREATED));
+        }
+
+        try (TestRestClient client = cluster.getRestClient(SEARCH_TEMPLATE)) {
+            final String searchTemplateOnServicesIndex = "services/_search/template";
+            final TestRestClient.HttpResponse searchTemplateOnAuthorizedIndexResponse = client.postJson(
+                searchTemplateOnServicesIndex,
+                TEST_QUERY
+            );
+            assertThat(searchTemplateOnAuthorizedIndexResponse.getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+    }
+
+    @Test
+    public void testSearchTemplateRequestUnauthorizedIndex() {
+        try (TestRestClient client = cluster.getRestClient(SEARCH_TEMPLATE)) {
+            final String searchTemplateOnMoviesIndex = "movies/_search/template";
+            final TestRestClient.HttpResponse searchTemplateOnUnauthorizedIndexResponse = client.postJson(
+                searchTemplateOnMoviesIndex,
+                TEST_QUERY
+            );
+            assertThat(searchTemplateOnUnauthorizedIndexResponse.getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+        }
+    }
+
+    @Test
+    public void testSearchTemplateRequestUnauthorizedAllIndices() {
+        try (TestRestClient client = cluster.getRestClient(SEARCH_TEMPLATE)) {
+            final String searchTemplateOnAllIndices = "_search/template";
+            final TestRestClient.HttpResponse searchOnAllIndicesResponse = client.postJson(searchTemplateOnAllIndices, TEST_QUERY);
+            assertThat(searchOnAllIndicesResponse.getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/rest/AuthZinRestLayerTests.java b/src/integrationTest/java/org/opensearch/security/rest/AuthZinRestLayerTests.java
new file mode 100644
index 0000000000..f7aa1e1b78
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/rest/AuthZinRestLayerTests.java
@@ -0,0 +1,243 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.security.rest;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import org.apache.http.HttpStatus;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.opensearch.test.framework.AuditCompliance;
+import org.opensearch.test.framework.AuditConfiguration;
+import org.opensearch.test.framework.AuditFilters;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.audit.AuditLogsRule;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+import org.opensearch.test.framework.testplugins.dummy.CustomLegacyTestPlugin;
+import org.opensearch.test.framework.testplugins.dummyprotected.CustomRestProtectedTestPlugin;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.opensearch.rest.RestRequest.Method.GET;
+import static org.opensearch.rest.RestRequest.Method.POST;
+import static org.opensearch.security.auditlog.impl.AuditCategory.FAILED_LOGIN;
+import static org.opensearch.security.auditlog.impl.AuditCategory.GRANTED_PRIVILEGES;
+import static org.opensearch.security.auditlog.impl.AuditCategory.MISSING_PRIVILEGES;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.privilegePredicateRESTLayer;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.privilegePredicateTransportLayer;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class AuthZinRestLayerTests {
+    protected final static TestSecurityConfig.User REST_ONLY = new TestSecurityConfig.User("rest_only").roles(
+        new Role("rest_only_role").clusterPermissions("security:dummy_protected/get").clusterPermissions("cluster:admin/dummy_plugin/dummy")
+    );
+
+    protected final static TestSecurityConfig.User TRANSPORT_ONLY = new TestSecurityConfig.User("transport_only").roles(
+        new Role("transport_only_role").clusterPermissions("cluster:admin/dummy_plugin/dummy")
+    );
+
+    protected final static TestSecurityConfig.User REST_PLUS_TRANSPORT = new TestSecurityConfig.User("rest_plus_transport").roles(
+        new Role("rest_plus_transport_role").clusterPermissions("security:dummy_protected/get")
+            .clusterPermissions("cluster:admin/dummy_plugin/dummy", "cluster:admin/dummy_protected_plugin/dummy/get")
+    );
+
+    protected final static TestSecurityConfig.User NO_PERM = new TestSecurityConfig.User("no_perm").roles(new Role("no_perm_role"));
+
+    protected final static TestSecurityConfig.User UNREGISTERED = new TestSecurityConfig.User("unregistered");
+
+    public static final String UNPROTECTED_BASE_ENDPOINT = "_plugins/_dummy";
+    public static final String PROTECTED_BASE_ENDPOINT = "_plugins/_dummy_protected";
+    public static final String UNPROTECTED_API = UNPROTECTED_BASE_ENDPOINT + "/dummy";
+    public static final String PROTECTED_API = PROTECTED_BASE_ENDPOINT + "/dummy";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(REST_ONLY, REST_PLUS_TRANSPORT, TRANSPORT_ONLY, NO_PERM)
+        .plugin(CustomLegacyTestPlugin.class)
+        .plugin(CustomRestProtectedTestPlugin.class)
+        .audit(
+            new AuditConfiguration(true).compliance(new AuditCompliance().enabled(true))
+                .filters(new AuditFilters().enabledRest(true).enabledTransport(true).resolveBulkRequests(true))
+        )
+        .build();
+
+    @Rule
+    public AuditLogsRule auditLogsRule = new AuditLogsRule();
+
+    /** Basic Access check */
+
+    @Test
+    public void testShouldNotAllowUnregisteredUsers() {
+        try (TestRestClient client = cluster.getRestClient(UNREGISTERED)) {
+            // Legacy plugin
+            assertThat(client.get(UNPROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(FAILED_LOGIN, UNREGISTERED, GET, "/" + UNPROTECTED_API));
+
+            // Protected Routes plugin
+            assertThat(client.get(PROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(FAILED_LOGIN, UNREGISTERED, GET, "/" + PROTECTED_API));
+        }
+    }
+
+    @Test
+    public void testAccessDeniedForUserWithNoPermissions() {
+        try (TestRestClient client = cluster.getRestClient(NO_PERM)) {
+            // fail at Transport (won't have a rest authz success audit log since this is not a protected endpoint)
+            assertThat(client.get(UNPROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(MISSING_PRIVILEGES, NO_PERM, "DummyRequest", "cluster:admin/dummy_plugin/dummy")
+            );
+
+            // fail at REST
+            assertThat(client.get(PROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(MISSING_PRIVILEGES, NO_PERM, GET, "/" + PROTECTED_API));
+        }
+    }
+
+    /** AuthZ in REST Layer check */
+
+    @Test
+    public void testShouldAllowAtRestAndBlockAtTransport() {
+        try (TestRestClient client = cluster.getRestClient(REST_ONLY)) {
+            assertThat(client.get(PROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+            // granted at Rest layer
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(GRANTED_PRIVILEGES, REST_ONLY, GET, "/" + PROTECTED_API));
+            // missing at Transport layer
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(
+                    MISSING_PRIVILEGES,
+                    REST_ONLY,
+                    "DummyRequest",
+                    "cluster:admin/dummy_protected_plugin/dummy/get"
+                )
+            );
+        }
+    }
+
+    @Test
+    public void testShouldAllowAtRestAndTransport() {
+        try (TestRestClient client = cluster.getRestClient(REST_PLUS_TRANSPORT)) {
+            assertOKResponseFromProtectedPlugin(client);
+
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(GRANTED_PRIVILEGES, REST_PLUS_TRANSPORT, GET, "/" + PROTECTED_API));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(
+                    GRANTED_PRIVILEGES,
+                    REST_PLUS_TRANSPORT,
+                    "DummyRequest",
+                    "cluster:admin/dummy_protected_plugin/dummy/get"
+                )
+            );
+        }
+    }
+
+    @Test
+    public void testShouldBlockAccessToEndpointForWhichUserHasNoPermission() {
+        try (TestRestClient client = cluster.getRestClient(REST_ONLY)) {
+            assertThat(client.post(PROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(MISSING_PRIVILEGES, REST_ONLY, POST, "/" + PROTECTED_API));
+        }
+
+        try (TestRestClient client = cluster.getRestClient(REST_PLUS_TRANSPORT)) {
+            assertThat(client.post(PROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(MISSING_PRIVILEGES, REST_PLUS_TRANSPORT, POST, "/" + PROTECTED_API));
+        }
+    }
+
+    /** Backwards compatibility check */
+
+    @Test
+    public void testBackwardsCompatibility() {
+
+        // TRANSPORT_ONLY should have access to legacy endpoint, but not protected endpoint
+        try (TestRestClient client = cluster.getRestClient(TRANSPORT_ONLY)) {
+            TestRestClient.HttpResponse res = client.get(PROTECTED_API);
+            assertThat(res.getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(MISSING_PRIVILEGES, TRANSPORT_ONLY, GET, "/" + PROTECTED_API));
+
+            assertOKResponseFromLegacyPlugin(client);
+            // check that there is no log for REST layer AuthZ since this is an unprotected endpoint
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(GRANTED_PRIVILEGES, TRANSPORT_ONLY, GET, UNPROTECTED_API));
+            // check that there is exactly 1 message for Transport Layer privilege evaluation
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(GRANTED_PRIVILEGES, TRANSPORT_ONLY, "DummyRequest", "cluster:admin/dummy_plugin/dummy")
+            );
+        }
+
+        // REST_ONLY should have access to legacy endpoint (protected endpoint already tested above)
+        try (TestRestClient client = cluster.getRestClient(REST_ONLY)) {
+            assertOKResponseFromLegacyPlugin(client);
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(GRANTED_PRIVILEGES, REST_ONLY, GET, UNPROTECTED_API));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(GRANTED_PRIVILEGES, REST_ONLY, "DummyRequest", "cluster:admin/dummy_plugin/dummy")
+            );
+        }
+
+        // DUMMY_WITH_TRANSPORT_PERM should have access to legacy endpoint (protected endpoint already tested above)
+        try (TestRestClient client = cluster.getRestClient(REST_PLUS_TRANSPORT)) {
+            assertOKResponseFromLegacyPlugin(client);
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(GRANTED_PRIVILEGES, REST_PLUS_TRANSPORT, GET, UNPROTECTED_API));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(
+                    GRANTED_PRIVILEGES,
+                    REST_PLUS_TRANSPORT,
+                    "DummyRequest",
+                    "cluster:admin/dummy_plugin/dummy"
+                )
+            );
+        }
+
+        // NO_PERM should not have access to legacy endpoint (protected endpoint already tested above)
+        try (TestRestClient client = cluster.getRestClient(NO_PERM)) {
+            assertThat(client.get(UNPROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_FORBIDDEN));
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(MISSING_PRIVILEGES, NO_PERM, GET, UNPROTECTED_API));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateTransportLayer(MISSING_PRIVILEGES, NO_PERM, "DummyRequest", "cluster:admin/dummy_plugin/dummy")
+            );
+        }
+
+        // UNREGISTERED should not have access to legacy endpoint (protected endpoint already tested above)
+        try (TestRestClient client = cluster.getRestClient(UNREGISTERED)) {
+            assertThat(client.get(UNPROTECTED_API).getStatusCode(), equalTo(HttpStatus.SC_UNAUTHORIZED));
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(MISSING_PRIVILEGES, UNREGISTERED, GET, UNPROTECTED_API));
+            auditLogsRule.assertExactly(
+                0,
+                privilegePredicateTransportLayer(MISSING_PRIVILEGES, UNREGISTERED, "DummyRequest", "cluster:admin/dummy_plugin/dummy")
+            );
+            auditLogsRule.assertExactly(0, privilegePredicateRESTLayer(FAILED_LOGIN, UNREGISTERED, GET, UNPROTECTED_API));
+        }
+    }
+
+    /** Helper Methods */
+    private void assertOKResponseFromLegacyPlugin(TestRestClient client) {
+        String expectedResponseFromLegacyPlugin = "{\"response_string\":\"Hello from dummy plugin\"}";
+        TestRestClient.HttpResponse res = client.get(UNPROTECTED_API);
+        assertThat(res.getStatusCode(), equalTo(HttpStatus.SC_OK));
+        assertThat(res.getBody(), equalTo(expectedResponseFromLegacyPlugin));
+    }
+
+    private void assertOKResponseFromProtectedPlugin(TestRestClient client) {
+        String expectedResponseFromProtectedPlugin = "{\"response_string\":\"Hello from dummy protected plugin\"}";
+        TestRestClient.HttpResponse res = client.get(PROTECTED_API);
+        assertThat(res.getStatusCode(), equalTo(HttpStatus.SC_OK));
+        assertThat(res.getBody(), equalTo(expectedResponseFromProtectedPlugin));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java
new file mode 100644
index 0000000000..5ffd525951
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java
@@ -0,0 +1,326 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.security.rest;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import joptsimple.internal.Strings;
+import org.apache.http.HttpStatus;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.opensearch.security.auditlog.impl.AuditMessage;
+import org.opensearch.test.framework.AuditCompliance;
+import org.opensearch.test.framework.AuditConfiguration;
+import org.opensearch.test.framework.AuditFilters;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.audit.AuditLogsRule;
+import org.opensearch.test.framework.cluster.ClusterManager;
+import org.opensearch.test.framework.cluster.LocalCluster;
+import org.opensearch.test.framework.cluster.TestRestClient;
+
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.lessThan;
+import static org.junit.Assert.assertTrue;
+import static org.opensearch.rest.RestRequest.Method.GET;
+import static org.opensearch.security.auditlog.impl.AuditCategory.GRANTED_PRIVILEGES;
+import static org.opensearch.security.auditlog.impl.AuditCategory.MISSING_PRIVILEGES;
+import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.grantedPrivilege;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.privilegePredicateRESTLayer;
+import static org.opensearch.test.framework.audit.AuditMessagePredicate.userAuthenticatedPredicate;
+
+@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class WhoAmITests {
+    protected final static TestSecurityConfig.User WHO_AM_I = new TestSecurityConfig.User("who_am_i_user").roles(
+        new Role("who_am_i_role").clusterPermissions("security:whoamiprotected")
+    );
+
+    protected final static TestSecurityConfig.User AUDIT_LOG_VERIFIER = new TestSecurityConfig.User("audit_log_verifier").roles(
+        new Role("audit_log_verifier_role").clusterPermissions("*").indexPermissions("*").on("*")
+    );
+
+    protected final static TestSecurityConfig.User WHO_AM_I_LEGACY = new TestSecurityConfig.User("who_am_i_user_legacy").roles(
+        new Role("who_am_i_role_legacy").clusterPermissions("cluster:admin/opendistro_security/whoamiprotected")
+    );
+
+    protected final static TestSecurityConfig.User WHO_AM_I_NO_PERM = new TestSecurityConfig.User("who_am_i_user_no_perm").roles(
+        new Role("who_am_i_role_no_perm")
+    );
+
+    protected final static TestSecurityConfig.User WHO_AM_I_UNREGISTERED = new TestSecurityConfig.User("who_am_i_user_no_perm");
+
+    protected final String expectedAuthorizedBody = "{\"dn\":null,\"is_admin\":false,\"is_node_certificate_request\":false}";
+    protected final String expectedUnuauthorizedBody =
+        "no permissions for [security:whoamiprotected] and User [name=who_am_i_user_no_perm, backend_roles=[], requestedTenant=null]";
+
+    public static final String WHOAMI_ENDPOINT = "_plugins/_security/whoami";
+    public static final String WHOAMI_PROTECTED_ENDPOINT = "_plugins/_security/whoamiprotected";
+
+    @ClassRule
+    public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS)
+        .authc(AUTHC_HTTPBASIC_INTERNAL)
+        .users(WHO_AM_I, WHO_AM_I_LEGACY, WHO_AM_I_NO_PERM, AUDIT_LOG_VERIFIER)
+        .audit(
+            new AuditConfiguration(true).compliance(new AuditCompliance().enabled(true))
+                .filters(new AuditFilters().enabledRest(true).enabledTransport(true).resolveBulkRequests(true))
+        )
+        .build();
+
+    @Rule
+    public AuditLogsRule auditLogsRule = new AuditLogsRule();
+
+    @Test
+    public void testWhoAmIWithGetPermissions() {
+
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I)) {
+            assertResponse(client.get(WHOAMI_PROTECTED_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+
+            // audit log, named route
+            auditLogsRule.assertExactlyOne(userAuthenticatedPredicate(WHO_AM_I, GET, "/" + WHOAMI_PROTECTED_ENDPOINT));
+            auditLogsRule.assertExactlyOne(privilegePredicateRESTLayer(GRANTED_PRIVILEGES, WHO_AM_I, GET, "/" + WHOAMI_PROTECTED_ENDPOINT));
+
+            assertResponse(client.get(WHOAMI_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+        }
+    }
+
+    @Test
+    public void testWhoAmIWithGetPermissionsLegacy() {
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I_LEGACY)) {
+            assertResponse(client.get(WHOAMI_PROTECTED_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+
+            // audit log, named route
+            auditLogsRule.assertExactlyOne(userAuthenticatedPredicate(WHO_AM_I_LEGACY, GET, "/" + WHOAMI_PROTECTED_ENDPOINT));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateRESTLayer(GRANTED_PRIVILEGES, WHO_AM_I_LEGACY, GET, "/" + WHOAMI_PROTECTED_ENDPOINT)
+            );
+
+            assertResponse(client.get(WHOAMI_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+        }
+    }
+
+    @Test
+    public void testWhoAmIWithoutGetPermissions() {
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I_NO_PERM)) {
+            assertResponse(client.get(WHOAMI_PROTECTED_ENDPOINT), HttpStatus.SC_UNAUTHORIZED, expectedUnuauthorizedBody);
+            // audit log, named route
+            auditLogsRule.assertExactlyOne(userAuthenticatedPredicate(WHO_AM_I_NO_PERM, GET, "/" + WHOAMI_PROTECTED_ENDPOINT));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateRESTLayer(MISSING_PRIVILEGES, WHO_AM_I_NO_PERM, GET, "/" + WHOAMI_PROTECTED_ENDPOINT)
+            );
+
+            assertResponse(client.get(WHOAMI_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+        }
+    }
+
+    @Test
+    public void testWhoAmIPost() {
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I)) {
+            assertThat(client.post(WHOAMI_ENDPOINT).getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I_LEGACY)) {
+            assertThat(client.post(WHOAMI_ENDPOINT).getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I_NO_PERM)) {
+            assertThat(client.post(WHOAMI_ENDPOINT).getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+
+        try (TestRestClient client = cluster.getRestClient(WHO_AM_I_UNREGISTERED)) {
+            assertThat(client.post(WHOAMI_ENDPOINT).getStatusCode(), equalTo(HttpStatus.SC_OK));
+        }
+
+        // No audit logs generated because `/whoami` is passthrough at Transport Layer, and POST route is not a NamedRoute
+        auditLogsRule.assertAuditLogsCount(0, 0);
+    }
+
+    @Test
+    public void testAuditLogSimilarityWithTransportLayer() {
+        try (TestRestClient client = cluster.getRestClient(AUDIT_LOG_VERIFIER)) {
+            assertResponse(client.get(WHOAMI_PROTECTED_ENDPOINT), HttpStatus.SC_OK, expectedAuthorizedBody);
+            auditLogsRule.assertExactlyOne(userAuthenticatedPredicate(AUDIT_LOG_VERIFIER, GET, "/" + WHOAMI_PROTECTED_ENDPOINT));
+            auditLogsRule.assertExactlyOne(
+                privilegePredicateRESTLayer(GRANTED_PRIVILEGES, AUDIT_LOG_VERIFIER, GET, "/" + WHOAMI_PROTECTED_ENDPOINT)
+            );
+
+            assertThat(client.get("_cat/indices").getStatusCode(), equalTo(HttpStatus.SC_OK));
+
+            // transport layer audit messages
+            auditLogsRule.assertExactly(2, grantedPrivilege(AUDIT_LOG_VERIFIER, "GetSettingsRequest"));
+
+            List<AuditMessage> grantedPrivilegesMessages = auditLogsRule.getCurrentTestAuditMessages()
+                .stream()
+                .filter(msg -> msg.getCategory().equals(GRANTED_PRIVILEGES))
+                .collect(Collectors.toList());
+
+            verifyAuditLogSimilarity(grantedPrivilegesMessages);
+        }
+    }
+
+    private void assertResponse(TestRestClient.HttpResponse response, int expectedStatus, String expectedBody) {
+        assertThat(response.getStatusCode(), equalTo(expectedStatus));
+        assertThat(response.getBody(), equalTo(expectedBody));
+    }
+
+    private void verifyAuditLogSimilarity(List<AuditMessage> currentTestAuditMessages) {
+        List<AuditMessage> restSet = new ArrayList<>();
+        List<AuditMessage> transportSet = new ArrayList<>();
+
+        // It is okay to loop through all even though we end up using only 2, as the total number of messages should be around 8
+        for (AuditMessage auditMessage : currentTestAuditMessages) {
+            if ("REST".equals(auditMessage.getAsMap().get(AuditMessage.REQUEST_LAYER).toString())) {
+                restSet.add(auditMessage);
+            } else if ("TRANSPORT".equals(auditMessage.getAsMap().get(AuditMessage.REQUEST_LAYER).toString())) {
+                transportSet.add(auditMessage);
+            }
+        }
+        // We pass 1 message from each layer to check for similarity
+        checkForStructuralSimilarity(restSet.get(0), transportSet.get(0));
+    }
+
+    /**
+     * Checks for structural similarity between audit message generated at Rest layer vs transport layer
+     * Example REST audit message for GRANTED_PRIVILEGES:
+     * {
+     *    "audit_cluster_name":"local_cluster_1",
+     *    "audit_node_name":"data_0",
+     *    "audit_rest_request_method":"GET",
+     *    "audit_category":"GRANTED_PRIVILEGES",
+     *    "audit_request_origin":"REST",
+     *    "audit_node_id":"Dez5cwAAQAC6cdmK_____w",
+     *    "audit_request_layer":"REST",
+     *    "audit_rest_request_path":"/_plugins/_security/whoamiprotected",
+     *    "@timestamp":"2023-08-16T17:35:53.531+00:00",
+     *    "audit_format_version":4,
+     *    "audit_request_remote_address":"127.0.0.1",
+     *    "audit_node_host_address":"127.0.0.1",
+     *    "audit_rest_request_headers":{
+     *       "Connection":[
+     *          "keep-alive"
+     *       ],
+     *       "User-Agent":[
+     *          "Apache-HttpClient/5.2.1 (Java/19.0.1)"
+     *       ],
+     *       "content-length":[
+     *          "0"
+     *       ],
+     *       "Host":[
+     *          "127.0.0.1:47210"
+     *       ],
+     *       "Accept-Encoding":[
+     *          "gzip, x-gzip, deflate"
+     *       ]
+     *    },
+     *    "audit_request_effective_user":"audit_log_verifier",
+     *    "audit_node_host_name":"127.0.0.1"
+     * }
+     *
+     *
+     * Example Transport audit message for GRANTED_PRIVILEGES:
+     * {
+     *    "audit_cluster_name":"local_cluster_1",
+     *    "audit_transport_headers":{
+     *       "_system_index_access_allowed":"false"
+     *    },
+     *    "audit_node_name":"data_0",
+     *    "audit_trace_task_id":"Dez5cwAAQAC6cdmK_____w:87",
+     *    "audit_transport_request_type":"GetSettingsRequest",
+     *    "audit_category":"GRANTED_PRIVILEGES",
+     *    "audit_request_origin":"REST",
+     *    "audit_node_id":"Dez5cwAAQAC6cdmK_____w",
+     *    "audit_request_layer":"TRANSPORT",
+     *    "@timestamp":"2023-08-16T17:35:53.621+00:00",
+     *    "audit_format_version":4,
+     *    "audit_request_remote_address":"127.0.0.1",
+     *    "audit_request_privilege":"indices:monitor/settings/get",
+     *    "audit_node_host_address":"127.0.0.1",
+     *    "audit_request_effective_user":"audit_log_verifier",
+     *    "audit_node_host_name":"127.0.0.1"
+     * }
+     *
+     *
+     * @param restAuditMessage audit message generated at REST layer
+     * @param transportAuditMessage audit message generated at Transport layer
+     */
+    private void checkForStructuralSimilarity(AuditMessage restAuditMessage, AuditMessage transportAuditMessage) {
+
+        Map<String, Object> restMsgFields = restAuditMessage.getAsMap();
+        Map<String, Object> transportMsgFields = transportAuditMessage.getAsMap();
+
+        Set<String> restAuditSet = restMsgFields.keySet();
+        Set<String> transportAuditSet = transportMsgFields.keySet();
+
+        // Added a magic number here and below, because there are always 15 or more items in each message generated via Audit logs
+        assertThat(restAuditSet.size(), greaterThan(14));
+        assertThat(transportAuditSet.size(), greaterThan(14));
+
+        // check for values of common fields
+        Set<String> commonFields = new HashSet<>(restAuditSet);
+        commonFields.retainAll(transportAuditSet);
+
+        assertCommonFields(commonFields, restMsgFields, transportMsgFields);
+
+        // check for values of uncommon fields
+        restAuditSet.removeAll(transportAuditMessage.getAsMap().keySet());
+        transportAuditSet.removeAll(restAuditMessage.getAsMap().keySet());
+
+        // We compare two sets and see there were more than 10 items with same keys indicating these logs are similar
+        // There are a few headers that are generated different for REST vs TRANSPORT layer audit logs, but that is expected
+        // The end goal of this test is to ensure similarity, not equality.
+        assertThat(restAuditSet.size(), lessThan(5));
+        assertThat(transportAuditSet.size(), lessThan(5));
+
+        assertThat(restMsgFields.get("audit_rest_request_path"), equalTo("/_plugins/_security/whoamiprotected"));
+        assertThat(restMsgFields.get("audit_rest_request_method").toString(), equalTo("GET"));
+        assertThat(restMsgFields.get("audit_rest_request_headers").toString().contains("Connection"), equalTo(true));
+
+        assertThat(transportMsgFields.get("audit_transport_request_type"), equalTo("GetSettingsRequest"));
+        assertThat(transportMsgFields.get("audit_request_privilege"), equalTo("indices:monitor/settings/get"));
+        assertThat(Strings.isNullOrEmpty(transportMsgFields.get("audit_trace_task_id").toString()), equalTo(false));
+    }
+
+    private void assertCommonFields(Set<String> commonFields, Map<String, Object> restMsgFields, Map<String, Object> transportMsgFields) {
+        for (String key : commonFields) {
+            if (key.equals("@timestamp")) {
+                String restTimeStamp = restMsgFields.get(key).toString();
+                String transportTimeStamp = transportMsgFields.get(key).toString();
+
+                DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
+                LocalDateTime restDateTime = LocalDateTime.parse(restTimeStamp, formatter);
+                LocalDateTime transportDateTime = LocalDateTime.parse(transportTimeStamp, formatter);
+
+                // assert that these log messages are generated within 10 seconds of each other
+                assertTrue(Duration.between(restDateTime, transportDateTime).getSeconds() < 10);
+            } else if (key.equals("audit_request_layer")) {
+                assertThat(restMsgFields.get(key).toString(), equalTo("REST"));
+                assertThat(transportMsgFields.get(key).toString(), equalTo("TRANSPORT"));
+            } else {
+                assertThat(restMsgFields.get(key), equalTo(transportMsgFields.get(key)));
+            }
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuditCompliance.java b/src/integrationTest/java/org/opensearch/test/framework/AuditCompliance.java
new file mode 100644
index 0000000000..d75fc0e4e5
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuditCompliance.java
@@ -0,0 +1,107 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class AuditCompliance implements ToXContentObject {
+
+    private boolean enabled = false;
+
+    private Boolean writeLogDiffs;
+
+    private List<String> readIgnoreUsers;
+
+    private List<String> writeWatchedIndices;
+
+    private List<String> writeIgnoreUsers;
+
+    private Boolean readMetadataOnly;
+
+    private Boolean writeMetadataOnly;
+
+    private Boolean externalConfig;
+
+    private Boolean internalConfig;
+
+    public AuditCompliance enabled(boolean enabled) {
+        this.enabled = enabled;
+        this.writeLogDiffs = false;
+        this.readIgnoreUsers = Collections.emptyList();
+        this.writeWatchedIndices = Collections.emptyList();
+        this.writeIgnoreUsers = Collections.emptyList();
+        this.readMetadataOnly = false;
+        this.writeMetadataOnly = false;
+        this.externalConfig = false;
+        this.internalConfig = false;
+        return this;
+    }
+
+    public AuditCompliance writeLogDiffs(boolean writeLogDiffs) {
+        this.writeLogDiffs = writeLogDiffs;
+        return this;
+    }
+
+    public AuditCompliance readIgnoreUsers(List<String> list) {
+        this.readIgnoreUsers = list;
+        return this;
+    }
+
+    public AuditCompliance writeWatchedIndices(List<String> list) {
+        this.writeWatchedIndices = list;
+        return this;
+    }
+
+    public AuditCompliance writeIgnoreUsers(List<String> list) {
+        this.writeIgnoreUsers = list;
+        return this;
+    }
+
+    public AuditCompliance readMetadataOnly(boolean readMetadataOnly) {
+        this.readMetadataOnly = readMetadataOnly;
+        return this;
+    }
+
+    public AuditCompliance writeMetadataOnly(boolean writeMetadataOnly) {
+        this.writeMetadataOnly = writeMetadataOnly;
+        return this;
+    }
+
+    public AuditCompliance externalConfig(boolean externalConfig) {
+        this.externalConfig = externalConfig;
+        return this;
+    }
+
+    public AuditCompliance internalConfig(boolean internalConfig) {
+        this.internalConfig = internalConfig;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("enabled", enabled);
+        xContentBuilder.field("write_log_diffs", writeLogDiffs);
+        xContentBuilder.field("read_ignore_users", readIgnoreUsers);
+        xContentBuilder.field("write_watched_indices", writeWatchedIndices);
+        xContentBuilder.field("write_ignore_users", writeIgnoreUsers);
+        xContentBuilder.field("read_metadata_only", readMetadataOnly);
+        xContentBuilder.field("write_metadata_only", writeMetadataOnly);
+        xContentBuilder.field("external_config", externalConfig);
+        xContentBuilder.field("internal_config", internalConfig);
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuditConfiguration.java b/src/integrationTest/java/org/opensearch/test/framework/AuditConfiguration.java
new file mode 100644
index 0000000000..1b3f11cc83
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuditConfiguration.java
@@ -0,0 +1,56 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class AuditConfiguration implements ToXContentObject {
+    private final boolean enabled;
+
+    private AuditFilters filters;
+
+    private AuditCompliance compliance;
+
+    public AuditConfiguration(boolean enabled) {
+        this.filters = new AuditFilters();
+        this.compliance = new AuditCompliance();
+        this.enabled = enabled;
+    }
+
+    public boolean isEnabled() {
+        return enabled;
+    }
+
+    public AuditConfiguration filters(AuditFilters filters) {
+        this.filters = filters;
+        return this;
+    }
+
+    public AuditConfiguration compliance(AuditCompliance auditCompliance) {
+        this.compliance = auditCompliance;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        // json built here must be deserialized to org.opensearch.security.auditlog.config.AuditConfig
+        xContentBuilder.startObject();
+        xContentBuilder.field("enabled", enabled);
+
+        xContentBuilder.field("audit", filters);
+        xContentBuilder.field("compliance", compliance);
+
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuditFilters.java b/src/integrationTest/java/org/opensearch/test/framework/AuditFilters.java
new file mode 100644
index 0000000000..f984becefa
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuditFilters.java
@@ -0,0 +1,122 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class AuditFilters implements ToXContentObject {
+
+    private Boolean enabledRest;
+
+    private Boolean enabledTransport;
+
+    private Boolean logRequestBody;
+
+    private Boolean resolveIndices;
+
+    private Boolean resolveBulkRequests;
+
+    private Boolean excludeSensitiveHeaders;
+
+    private List<String> ignoreUsers;
+
+    private List<String> ignoreRequests;
+
+    private List<String> disabledRestCategories;
+
+    private List<String> disabledTransportCategories;
+
+    public AuditFilters() {
+        this.enabledRest = false;
+        this.enabledTransport = false;
+
+        this.logRequestBody = true;
+        this.resolveIndices = true;
+        this.resolveBulkRequests = false;
+        this.excludeSensitiveHeaders = true;
+
+        this.ignoreUsers = Collections.emptyList();
+        this.ignoreRequests = Collections.emptyList();
+        this.disabledRestCategories = Collections.emptyList();
+        this.disabledTransportCategories = Collections.emptyList();
+    }
+
+    public AuditFilters enabledRest(boolean enabled) {
+        this.enabledRest = enabled;
+        return this;
+    }
+
+    public AuditFilters enabledTransport(boolean enabled) {
+        this.enabledTransport = enabled;
+        return this;
+    }
+
+    public AuditFilters logRequestBody(boolean logRequestBody) {
+        this.logRequestBody = logRequestBody;
+        return this;
+    }
+
+    public AuditFilters resolveIndices(boolean resolveIndices) {
+        this.resolveIndices = resolveIndices;
+        return this;
+    }
+
+    public AuditFilters resolveBulkRequests(boolean resolveBulkRequests) {
+        this.resolveBulkRequests = resolveBulkRequests;
+        return this;
+    }
+
+    public AuditFilters excludeSensitiveHeaders(boolean excludeSensitiveHeaders) {
+        this.excludeSensitiveHeaders = excludeSensitiveHeaders;
+        return this;
+    }
+
+    public AuditFilters ignoreUsers(List<String> ignoreUsers) {
+        this.ignoreUsers = ignoreUsers;
+        return this;
+    }
+
+    public AuditFilters ignoreRequests(List<String> ignoreRequests) {
+        this.ignoreRequests = ignoreRequests;
+        return this;
+    }
+
+    public AuditFilters disabledRestCategories(List<String> disabledRestCategories) {
+        this.disabledRestCategories = disabledRestCategories;
+        return this;
+    }
+
+    public AuditFilters disabledTransportCategories(List<String> disabledTransportCategories) {
+        this.disabledTransportCategories = disabledTransportCategories;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("enable_rest", enabledRest);
+        xContentBuilder.field("enable_transport", enabledTransport);
+        xContentBuilder.field("resolve_indices", resolveIndices);
+        xContentBuilder.field("log_request_body", logRequestBody);
+        xContentBuilder.field("resolve_bulk_requests", resolveBulkRequests);
+        xContentBuilder.field("exclude_sensitive_headers", excludeSensitiveHeaders);
+        xContentBuilder.field("ignore_users", ignoreUsers);
+        xContentBuilder.field("ignore_requests", ignoreRequests);
+        xContentBuilder.field("disabled_rest_categories", disabledRestCategories);
+        xContentBuilder.field("disabled_transport_categories", disabledTransportCategories);
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuthFailureListeners.java b/src/integrationTest/java/org/opensearch/test/framework/AuthFailureListeners.java
new file mode 100644
index 0000000000..472d3d8d08
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuthFailureListeners.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class AuthFailureListeners implements ToXContentObject {
+
+    private Map<String, RateLimiting> limits = new LinkedHashMap<>();
+
+    public AuthFailureListeners addRateLimit(RateLimiting rateLimiting) {
+        Objects.requireNonNull(rateLimiting, "Rate limiting is required");
+        limits.put(rateLimiting.getName(), rateLimiting);
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        for (Map.Entry<String, RateLimiting> entry : limits.entrySet()) {
+            xContentBuilder.field(entry.getKey(), entry.getValue());
+        }
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuthorizationBackend.java b/src/integrationTest/java/org/opensearch/test/framework/AuthorizationBackend.java
new file mode 100644
index 0000000000..521d35ed46
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuthorizationBackend.java
@@ -0,0 +1,45 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Supplier;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class AuthorizationBackend implements ToXContentObject {
+    private final String type;
+    private Supplier<Map<String, Object>> config;
+
+    public AuthorizationBackend(String type) {
+        this.type = type;
+    }
+
+    public AuthorizationBackend config(Map<String, Object> ldapConfig) {
+        return config(() -> ldapConfig);
+    }
+
+    public AuthorizationBackend config(Supplier<Map<String, Object>> ldapConfigSupplier) {
+        this.config = Objects.requireNonNull(ldapConfigSupplier, "Configuration supplier is required");
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("type", type);
+        xContentBuilder.field("config", config.get());
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/AuthzDomain.java b/src/integrationTest/java/org/opensearch/test/framework/AuthzDomain.java
new file mode 100644
index 0000000000..5ccf1f9ee0
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/AuthzDomain.java
@@ -0,0 +1,70 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+/**
+* The class represents authorization domain
+*/
+public class AuthzDomain implements ToXContentObject {
+
+    private final String id;
+
+    private String description;
+
+    private boolean httpEnabled;
+
+    private boolean transportEnabled;
+
+    private AuthorizationBackend authorizationBackend;
+
+    public AuthzDomain(String id) {
+        this.id = id;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public AuthzDomain description(String description) {
+        this.description = description;
+        return this;
+    }
+
+    public AuthzDomain httpEnabled(boolean httpEnabled) {
+        this.httpEnabled = httpEnabled;
+        return this;
+    }
+
+    public AuthzDomain authorizationBackend(AuthorizationBackend authorizationBackend) {
+        this.authorizationBackend = authorizationBackend;
+        return this;
+    }
+
+    public AuthzDomain transportEnabled(boolean transportEnabled) {
+        this.transportEnabled = transportEnabled;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("description", description);
+        xContentBuilder.field("http_enabled", httpEnabled);
+        xContentBuilder.field("transport_enabled", transportEnabled);
+        xContentBuilder.field("authorization_backend", authorizationBackend);
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/JwtConfigBuilder.java b/src/integrationTest/java/org/opensearch/test/framework/JwtConfigBuilder.java
new file mode 100644
index 0000000000..48dfa128e0
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/JwtConfigBuilder.java
@@ -0,0 +1,62 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.util.Map;
+import java.util.Objects;
+
+import com.google.common.collect.ImmutableMap.Builder;
+
+import static org.apache.commons.lang3.StringUtils.isNoneBlank;
+
+public class JwtConfigBuilder {
+    private String jwtHeader;
+    private String signingKey;
+    private String subjectKey;
+    private String rolesKey;
+
+    public JwtConfigBuilder jwtHeader(String jwtHeader) {
+        this.jwtHeader = jwtHeader;
+        return this;
+    }
+
+    public JwtConfigBuilder signingKey(String signingKey) {
+        this.signingKey = signingKey;
+        return this;
+    }
+
+    public JwtConfigBuilder subjectKey(String subjectKey) {
+        this.subjectKey = subjectKey;
+        return this;
+    }
+
+    public JwtConfigBuilder rolesKey(String rolesKey) {
+        this.rolesKey = rolesKey;
+        return this;
+    }
+
+    public Map<String, Object> build() {
+        Builder<String, Object> builder = new Builder<>();
+        if (Objects.isNull(signingKey)) {
+            throw new IllegalStateException("Signing key is required.");
+        }
+        builder.put("signing_key", signingKey);
+        if (isNoneBlank(jwtHeader)) {
+            builder.put("jwt_header", jwtHeader);
+        }
+        if (isNoneBlank(subjectKey)) {
+            builder.put("subject_key", subjectKey);
+        }
+        if (isNoneBlank(rolesKey)) {
+            builder.put("roles_key", rolesKey);
+        }
+        return builder.build();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/LdapAuthenticationConfigBuilder.java b/src/integrationTest/java/org/opensearch/test/framework/LdapAuthenticationConfigBuilder.java
new file mode 100644
index 0000000000..07f1836b59
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/LdapAuthenticationConfigBuilder.java
@@ -0,0 +1,119 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+/**
+* @param <T> is related to subclasses thus method defined in the class <code>LdapAuthenticationConfigBuilder</code> return proper subclass
+*           type so that all method defined in subclass are available in one of builder superclass method is invoked. Please see
+*           {@link LdapAuthorizationConfigBuilder}
+*/
+public class LdapAuthenticationConfigBuilder<T extends LdapAuthenticationConfigBuilder> {
+    private boolean enableSsl = false;
+    private boolean enableStartTls = false;
+    private boolean enableSslClientAuth = false;
+    private boolean verifyHostnames = false;
+    private List<String> hosts;
+    private String bindDn;
+    private String password;
+    private String userBase;
+    private String userSearch;
+    private String usernameAttribute;
+
+    private String penTrustedCasFilePath;
+
+    /**
+    * Subclass of <code>this</code>
+    */
+    private final T builderSubclass;
+
+    protected LdapAuthenticationConfigBuilder(Function<LdapAuthenticationConfigBuilder, T> thisCastFunction) {
+        this.builderSubclass = thisCastFunction.apply(this);
+    }
+
+    public static LdapAuthenticationConfigBuilder<LdapAuthenticationConfigBuilder> config() {
+        return new LdapAuthenticationConfigBuilder<>(Function.identity());
+    }
+
+    public T enableSsl(boolean enableSsl) {
+        this.enableSsl = enableSsl;
+        return builderSubclass;
+    }
+
+    public T enableStartTls(boolean enableStartTls) {
+        this.enableStartTls = enableStartTls;
+        return builderSubclass;
+    }
+
+    public T enableSslClientAuth(boolean enableSslClientAuth) {
+        this.enableSslClientAuth = enableSslClientAuth;
+        return builderSubclass;
+    }
+
+    public T verifyHostnames(boolean verifyHostnames) {
+        this.verifyHostnames = verifyHostnames;
+        return builderSubclass;
+    }
+
+    public T hosts(List<String> hosts) {
+        this.hosts = hosts;
+        return builderSubclass;
+    }
+
+    public T bindDn(String bindDn) {
+        this.bindDn = bindDn;
+        return builderSubclass;
+    }
+
+    public T password(String password) {
+        this.password = password;
+        return builderSubclass;
+    }
+
+    public T userBase(String userBase) {
+        this.userBase = userBase;
+        return builderSubclass;
+    }
+
+    public T userSearch(String userSearch) {
+        this.userSearch = userSearch;
+        return builderSubclass;
+    }
+
+    public T usernameAttribute(String usernameAttribute) {
+        this.usernameAttribute = usernameAttribute;
+        return builderSubclass;
+    }
+
+    public T penTrustedCasFilePath(String penTrustedCasFilePath) {
+        this.penTrustedCasFilePath = penTrustedCasFilePath;
+        return builderSubclass;
+    }
+
+    public Map<String, Object> build() {
+        HashMap<String, Object> config = new HashMap<>();
+        config.put("enable_ssl", enableSsl);
+        config.put("enable_start_tls", enableStartTls);
+        config.put("enable_ssl_client_auth", enableSslClientAuth);
+        config.put("verify_hostnames", verifyHostnames);
+        config.put("hosts", hosts);
+        config.put("bind_dn", bindDn);
+        config.put("password", password);
+        config.put("userbase", userBase);
+        config.put("usersearch", userSearch);
+        config.put("username_attribute", usernameAttribute);
+        config.put("pemtrustedcas_filepath", penTrustedCasFilePath);
+        return config;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/LdapAuthorizationConfigBuilder.java b/src/integrationTest/java/org/opensearch/test/framework/LdapAuthorizationConfigBuilder.java
new file mode 100644
index 0000000000..9f2a0abd83
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/LdapAuthorizationConfigBuilder.java
@@ -0,0 +1,75 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.util.List;
+import java.util.Map;
+
+public class LdapAuthorizationConfigBuilder extends LdapAuthenticationConfigBuilder<LdapAuthorizationConfigBuilder> {
+    private List<String> skipUsers;
+    private String roleBase;
+    private String roleSearch;
+    private String userRoleAttribute;
+    private String userRoleName;
+    private String roleName;
+    private boolean resolveNestedRoles;
+
+    public LdapAuthorizationConfigBuilder() {
+        super(LdapAuthorizationConfigBuilder.class::cast);
+    }
+
+    public LdapAuthorizationConfigBuilder skipUsers(List<String> skipUsers) {
+        this.skipUsers = skipUsers;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder roleBase(String roleBase) {
+        this.roleBase = roleBase;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder roleSearch(String roleSearch) {
+        this.roleSearch = roleSearch;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder userRoleAttribute(String userRoleAttribute) {
+        this.userRoleAttribute = userRoleAttribute;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder userRoleName(String userRoleName) {
+        this.userRoleName = userRoleName;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder roleName(String roleName) {
+        this.roleName = roleName;
+        return this;
+    }
+
+    public LdapAuthorizationConfigBuilder resolveNestedRoles(boolean resolveNestedRoles) {
+        this.resolveNestedRoles = resolveNestedRoles;
+        return this;
+    }
+
+    @Override
+    public Map<String, Object> build() {
+        Map<String, Object> map = super.build();
+        map.put("skip_users", skipUsers);
+        map.put("rolebase", roleBase);
+        map.put("rolesearch", roleSearch);
+        map.put("userroleattribute", userRoleAttribute);
+        map.put("userrolename", userRoleName);
+        map.put("rolename", roleName);
+        map.put("resolve_nested_roles", resolveNestedRoles);
+        return map;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/OnBehalfOfConfig.java b/src/integrationTest/java/org/opensearch/test/framework/OnBehalfOfConfig.java
new file mode 100644
index 0000000000..63e1544f98
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/OnBehalfOfConfig.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ */
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class OnBehalfOfConfig implements ToXContentObject {
+    private Boolean oboEnabled;
+    private String signing_key;
+    private String encryption_key;
+
+    public OnBehalfOfConfig oboEnabled(Boolean oboEnabled) {
+        this.oboEnabled = oboEnabled;
+        return this;
+    }
+
+    public OnBehalfOfConfig signingKey(String signing_key) {
+        this.signing_key = signing_key;
+        return this;
+    }
+
+    public OnBehalfOfConfig encryptionKey(String encryption_key) {
+        this.encryption_key = encryption_key;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, ToXContent.Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("enabled", oboEnabled);
+        xContentBuilder.field("signing_key", signing_key);
+        if (StringUtils.isNoneBlank(encryption_key)) {
+            xContentBuilder.field("encryption_key", encryption_key);
+        }
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/RateLimiting.java b/src/integrationTest/java/org/opensearch/test/framework/RateLimiting.java
new file mode 100644
index 0000000000..bd38aac1e5
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/RateLimiting.java
@@ -0,0 +1,85 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+public class RateLimiting implements ToXContentObject {
+
+    private final String name;
+    private String type;
+    private String authenticationBackend;
+    private Integer allowedTries;
+    private Integer timeWindowSeconds;
+    private Integer blockExpirySeconds;
+    private Integer maxBlockedClients;
+    private Integer maxTrackedClients;
+
+    public String getName() {
+        return name;
+    }
+
+    public RateLimiting(String name) {
+        this.name = Objects.requireNonNull(name, "Rate limit name is required.");
+    }
+
+    public RateLimiting type(String type) {
+        this.type = type;
+        return this;
+    }
+
+    public RateLimiting authenticationBackend(String authenticationBackend) {
+        this.authenticationBackend = authenticationBackend;
+        return this;
+    }
+
+    public RateLimiting allowedTries(Integer allowedTries) {
+        this.allowedTries = allowedTries;
+        return this;
+    }
+
+    public RateLimiting timeWindowSeconds(Integer timeWindowSeconds) {
+        this.timeWindowSeconds = timeWindowSeconds;
+        return this;
+    }
+
+    public RateLimiting blockExpirySeconds(Integer blockExpirySeconds) {
+        this.blockExpirySeconds = blockExpirySeconds;
+        return this;
+    }
+
+    public RateLimiting maxBlockedClients(Integer maxBlockedClients) {
+        this.maxBlockedClients = maxBlockedClients;
+        return this;
+    }
+
+    public RateLimiting maxTrackedClients(Integer maxTrackedClients) {
+        this.maxTrackedClients = maxTrackedClients;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("type", type);
+        xContentBuilder.field("authentication_backend", authenticationBackend);
+        xContentBuilder.field("allowed_tries", allowedTries);
+        xContentBuilder.field("time_window_seconds", timeWindowSeconds);
+        xContentBuilder.field("block_expiry_seconds", blockExpirySeconds);
+        xContentBuilder.field("max_blocked_clients", maxBlockedClients);
+        xContentBuilder.field("max_tracked_clients", maxTrackedClients);
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/RolesMapping.java b/src/integrationTest/java/org/opensearch/test/framework/RolesMapping.java
new file mode 100644
index 0000000000..997e7e128b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/RolesMapping.java
@@ -0,0 +1,108 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+* The class represents mapping between backend roles {@link #backendRoles} to OpenSearch role defined by field {@link #roleName}. The
+* class provides convenient builder-like methods and can be serialized to JSON. Serialization to JSON is required to store the class
+* in an OpenSearch index which contains Security plugin configuration.
+*/
+public class RolesMapping implements ToXContentObject {
+
+    /**
+    * OpenSearch role name
+    */
+    private String roleName;
+
+    /**
+    * Backend role names
+    */
+    private List<String> backendRoles;
+    private List<String> hostIPs;
+
+    private boolean reserved = false;
+
+    /**
+    * Creates roles mapping to OpenSearch role defined by parameter <code>role</code>
+    * @param role OpenSearch role, must not be <code>null</code>.
+    */
+    public RolesMapping(Role role) {
+        requireNonNull(role);
+        this.roleName = requireNonNull(role.getName());
+        this.backendRoles = new ArrayList<>();
+        this.hostIPs = new ArrayList<>();
+    }
+
+    /**
+    * Defines backend role names
+    * @param backendRoles backend roles names
+    * @return current {@link RolesMapping} instance
+    */
+    public RolesMapping backendRoles(String... backendRoles) {
+        this.backendRoles.addAll(Arrays.asList(backendRoles));
+        return this;
+    }
+
+    /**
+     * Defines host IP address
+     * @param hostIPs host IP address
+     * @return current {@link RolesMapping} instance
+     */
+    public RolesMapping hostIPs(String... hostIPs) {
+        this.hostIPs.addAll(Arrays.asList(hostIPs));
+        return this;
+    }
+
+    /**
+    * Determines if role is reserved
+    * @param reserved <code>true</code> for reserved roles
+    * @return current {@link RolesMapping} instance
+    */
+    public RolesMapping reserved(boolean reserved) {
+        this.reserved = reserved;
+        return this;
+    }
+
+    /**
+    * Returns OpenSearch role name
+    * @return role name
+    */
+    public String getRoleName() {
+        return roleName;
+    }
+
+    /**
+    * Controls serialization to JSON
+    * @param xContentBuilder must not be <code>null</code>
+    * @param params not used parameter, but required by the interface {@link ToXContentObject}
+    * @return builder form parameter <code>xContentBuilder</code>
+    * @throws IOException denotes error during serialization to JSON
+    */
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("reserved", reserved);
+        xContentBuilder.field("backend_roles", backendRoles);
+        xContentBuilder.field("hosts", hostIPs);
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/TestIndex.java b/src/integrationTest/java/org/opensearch/test/framework/TestIndex.java
new file mode 100644
index 0000000000..6f6bd935a5
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/TestIndex.java
@@ -0,0 +1,83 @@
+/*
+* Copyright 2021-2022 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework;
+
+import org.opensearch.action.admin.indices.create.CreateIndexRequest;
+import org.opensearch.client.Client;
+import org.opensearch.common.settings.Settings;
+
+public class TestIndex {
+
+    private final String name;
+    private final Settings settings;
+
+    public TestIndex(String name, Settings settings) {
+        this.name = name;
+        this.settings = settings;
+
+    }
+
+    public void create(Client client) {
+        client.admin().indices().create(new CreateIndexRequest(name).settings(settings)).actionGet();
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public static Builder name(String name) {
+        return new Builder().name(name);
+    }
+
+    public static class Builder {
+        private String name;
+        private Settings.Builder settings = Settings.builder();
+
+        public Builder name(String name) {
+            this.name = name;
+            return this;
+        }
+
+        public Builder setting(String name, int value) {
+            settings.put(name, value);
+            return this;
+        }
+
+        public Builder shards(int value) {
+            settings.put("index.number_of_shards", 5);
+            return this;
+        }
+
+        public TestIndex build() {
+            return new TestIndex(name, settings.build());
+        }
+
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java
new file mode 100644
index 0000000000..2fd3fc474d
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java
@@ -0,0 +1,731 @@
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.bouncycastle.crypto.generators.OpenBSDBCrypt;
+
+import org.opensearch.action.admin.indices.create.CreateIndexRequest;
+import org.opensearch.action.index.IndexRequest;
+import org.opensearch.action.update.UpdateRequest;
+import org.opensearch.client.Client;
+import org.opensearch.core.common.bytes.BytesReference;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.security.securityconf.impl.CType;
+import org.opensearch.test.framework.cluster.OpenSearchClientProvider.UserCredentialsHolder;
+
+import static org.apache.http.HttpHeaders.AUTHORIZATION;
+import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
+
+/**
+* This class allows the declarative specification of the security configuration; in particular:
+*
+* - config.yml
+* - internal_users.yml
+* - roles.yml
+* - roles_mapping.yml
+*
+* The class does the whole round-trip, i.e., the configuration is serialized to YAML/JSON and then written to
+* the configuration index of the security plugin.
+*/
+public class TestSecurityConfig {
+
+    private static final Logger log = LogManager.getLogger(TestSecurityConfig.class);
+
+    private Config config = new Config();
+    private Map<String, User> internalUsers = new LinkedHashMap<>();
+    private Map<String, Role> roles = new LinkedHashMap<>();
+    private AuditConfiguration auditConfiguration;
+    private Map<String, RolesMapping> rolesMapping = new LinkedHashMap<>();
+
+    private String indexName = ".opendistro_security";
+
+    public TestSecurityConfig() {
+
+    }
+
+    public TestSecurityConfig configIndexName(String configIndexName) {
+        this.indexName = configIndexName;
+        return this;
+    }
+
+    public TestSecurityConfig authFailureListeners(AuthFailureListeners listener) {
+        config.authFailureListeners(listener);
+        return this;
+    }
+
+    public TestSecurityConfig anonymousAuth(boolean anonymousAuthEnabled) {
+        config.anonymousAuth(anonymousAuthEnabled);
+        return this;
+    }
+
+    public TestSecurityConfig doNotFailOnForbidden(boolean doNotFailOnForbidden) {
+        config.doNotFailOnForbidden(doNotFailOnForbidden);
+        return this;
+    }
+
+    public TestSecurityConfig xff(XffConfig xffConfig) {
+        config.xffConfig(xffConfig);
+        return this;
+    }
+
+    public TestSecurityConfig onBehalfOf(OnBehalfOfConfig onBehalfOfConfig) {
+        config.onBehalfOfConfig(onBehalfOfConfig);
+        return this;
+    }
+
+    public TestSecurityConfig authc(AuthcDomain authcDomain) {
+        config.authc(authcDomain);
+        return this;
+    }
+
+    public TestSecurityConfig authz(AuthzDomain authzDomain) {
+        config.authz(authzDomain);
+        return this;
+    }
+
+    public TestSecurityConfig user(User user) {
+        this.internalUsers.put(user.name, user);
+
+        for (Role role : user.roles) {
+            this.roles.put(role.name, role);
+        }
+
+        return this;
+    }
+
+    public List<User> getUsers() {
+        return new ArrayList<>(internalUsers.values());
+    }
+
+    public TestSecurityConfig roles(Role... roles) {
+        for (Role role : roles) {
+            if (this.roles.containsKey(role.name)) {
+                throw new IllegalStateException("Role with name " + role.name + " is already defined");
+            }
+            this.roles.put(role.name, role);
+        }
+
+        return this;
+    }
+
+    public TestSecurityConfig audit(AuditConfiguration auditConfiguration) {
+        this.auditConfiguration = auditConfiguration;
+        return this;
+    }
+
+    public TestSecurityConfig rolesMapping(RolesMapping... mappings) {
+        for (RolesMapping mapping : mappings) {
+            String roleName = mapping.getRoleName();
+            if (rolesMapping.containsKey(roleName)) {
+                throw new IllegalArgumentException("Role mapping " + roleName + " already exists");
+            }
+            this.rolesMapping.put(roleName, mapping);
+        }
+        return this;
+    }
+
+    public static class Config implements ToXContentObject {
+        private boolean anonymousAuth;
+
+        private Boolean doNotFailOnForbidden;
+        private XffConfig xffConfig;
+        private OnBehalfOfConfig onBehalfOfConfig;
+        private Map<String, AuthcDomain> authcDomainMap = new LinkedHashMap<>();
+
+        private AuthFailureListeners authFailureListeners;
+        private Map<String, AuthzDomain> authzDomainMap = new LinkedHashMap<>();
+
+        public Config anonymousAuth(boolean anonymousAuth) {
+            this.anonymousAuth = anonymousAuth;
+            return this;
+        }
+
+        public Config doNotFailOnForbidden(Boolean doNotFailOnForbidden) {
+            this.doNotFailOnForbidden = doNotFailOnForbidden;
+            return this;
+        }
+
+        public Config xffConfig(XffConfig xffConfig) {
+            this.xffConfig = xffConfig;
+            return this;
+        }
+
+        public Config onBehalfOfConfig(OnBehalfOfConfig onBehalfOfConfig) {
+            this.onBehalfOfConfig = onBehalfOfConfig;
+            return this;
+        }
+
+        public Config authc(AuthcDomain authcDomain) {
+            authcDomainMap.put(authcDomain.id, authcDomain);
+            return this;
+        }
+
+        public Config authFailureListeners(AuthFailureListeners authFailureListeners) {
+            this.authFailureListeners = authFailureListeners;
+            return this;
+        }
+
+        public Config authz(AuthzDomain authzDomain) {
+            authzDomainMap.put(authzDomain.getId(), authzDomain);
+            return this;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+            xContentBuilder.startObject();
+            xContentBuilder.startObject("dynamic");
+
+            if (onBehalfOfConfig != null) {
+                xContentBuilder.field("on_behalf_of", onBehalfOfConfig);
+            }
+
+            if (anonymousAuth || (xffConfig != null)) {
+                xContentBuilder.startObject("http");
+                xContentBuilder.field("anonymous_auth_enabled", anonymousAuth);
+                if (xffConfig != null) {
+                    xContentBuilder.field("xff", xffConfig);
+                }
+                xContentBuilder.endObject();
+            }
+            if (doNotFailOnForbidden != null) {
+                xContentBuilder.field("do_not_fail_on_forbidden", doNotFailOnForbidden);
+            }
+
+            xContentBuilder.field("authc", authcDomainMap);
+            if (authzDomainMap.isEmpty() == false) {
+                xContentBuilder.field("authz", authzDomainMap);
+            }
+
+            if (authFailureListeners != null) {
+                xContentBuilder.field("auth_failure_listeners", authFailureListeners);
+            }
+
+            xContentBuilder.endObject();
+            xContentBuilder.endObject();
+            return xContentBuilder;
+        }
+    }
+
+    public static class User implements UserCredentialsHolder, ToXContentObject {
+
+        public final static TestSecurityConfig.User USER_ADMIN = new TestSecurityConfig.User("admin").roles(
+            new Role("allaccess").indexPermissions("*").on("*").clusterPermissions("*")
+        );
+
+        String name;
+        private String password;
+        List<Role> roles = new ArrayList<>();
+        private Map<String, Object> attributes = new HashMap<>();
+
+        public User(String name) {
+            this.name = name;
+            this.password = "secret";
+        }
+
+        public User password(String password) {
+            this.password = password;
+            return this;
+        }
+
+        public User roles(Role... roles) {
+            // We scope the role names by user to keep tests free of potential side effects
+            String roleNamePrefix = "user_" + this.getName() + "__";
+            this.roles.addAll(
+                Arrays.asList(roles).stream().map((r) -> r.clone().name(roleNamePrefix + r.getName())).collect(Collectors.toSet())
+            );
+            return this;
+        }
+
+        public User attr(String key, Object value) {
+            this.attributes.put(key, value);
+            return this;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public String getPassword() {
+            return password;
+        }
+
+        public Set<String> getRoleNames() {
+            return roles.stream().map(Role::getName).collect(Collectors.toSet());
+        }
+
+        public Object getAttribute(String attributeName) {
+            return attributes.get(attributeName);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+            xContentBuilder.startObject();
+
+            xContentBuilder.field("hash", hash(password.toCharArray()));
+
+            Set<String> roleNames = getRoleNames();
+
+            if (!roleNames.isEmpty()) {
+                xContentBuilder.field("opendistro_security_roles", roleNames);
+            }
+
+            if (attributes != null && attributes.size() != 0) {
+                xContentBuilder.field("attributes", attributes);
+            }
+
+            xContentBuilder.endObject();
+            return xContentBuilder;
+        }
+    }
+
+    public static class Role implements ToXContentObject {
+        public static Role ALL_ACCESS = new Role("all_access").clusterPermissions("*").indexPermissions("*").on("*");
+
+        private String name;
+        private List<String> clusterPermissions = new ArrayList<>();
+
+        private List<IndexPermission> indexPermissions = new ArrayList<>();
+
+        public Role(String name) {
+            this.name = name;
+        }
+
+        public Role clusterPermissions(String... clusterPermissions) {
+            this.clusterPermissions.addAll(Arrays.asList(clusterPermissions));
+            return this;
+        }
+
+        public IndexPermission indexPermissions(String... indexPermissions) {
+            return new IndexPermission(this, indexPermissions);
+        }
+
+        public Role name(String name) {
+            this.name = name;
+            return this;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public Role clone() {
+            Role role = new Role(this.name);
+            role.clusterPermissions.addAll(this.clusterPermissions);
+            role.indexPermissions.addAll(this.indexPermissions);
+            return role;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+            xContentBuilder.startObject();
+
+            if (!clusterPermissions.isEmpty()) {
+                xContentBuilder.field("cluster_permissions", clusterPermissions);
+            }
+
+            if (!indexPermissions.isEmpty()) {
+                xContentBuilder.field("index_permissions", indexPermissions);
+            }
+
+            xContentBuilder.endObject();
+            return xContentBuilder;
+        }
+    }
+
+    public static class IndexPermission implements ToXContentObject {
+        private List<String> allowedActions;
+        private List<String> indexPatterns;
+        private Role role;
+        private String dlsQuery;
+        private List<String> fls;
+        private List<String> maskedFields;
+
+        IndexPermission(Role role, String... allowedActions) {
+            this.allowedActions = Arrays.asList(allowedActions);
+            this.role = role;
+        }
+
+        public IndexPermission dls(String dlsQuery) {
+            this.dlsQuery = dlsQuery;
+            return this;
+        }
+
+        public IndexPermission fls(String... fls) {
+            this.fls = Arrays.asList(fls);
+            return this;
+        }
+
+        public IndexPermission maskedFields(String... maskedFields) {
+            this.maskedFields = Arrays.asList(maskedFields);
+            return this;
+        }
+
+        public Role on(String... indexPatterns) {
+            this.indexPatterns = Arrays.asList(indexPatterns);
+            this.role.indexPermissions.add(this);
+            return this.role;
+        }
+
+        public Role on(TestIndex... testindices) {
+            this.indexPatterns = Arrays.asList(testindices).stream().map(TestIndex::getName).collect(Collectors.toList());
+            this.role.indexPermissions.add(this);
+            return this.role;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+            xContentBuilder.startObject();
+
+            xContentBuilder.field("index_patterns", indexPatterns);
+            xContentBuilder.field("allowed_actions", allowedActions);
+
+            if (dlsQuery != null) {
+                xContentBuilder.field("dls", dlsQuery);
+            }
+
+            if (fls != null) {
+                xContentBuilder.field("fls", fls);
+            }
+
+            if (maskedFields != null) {
+                xContentBuilder.field("masked_fields", maskedFields);
+            }
+
+            xContentBuilder.endObject();
+            return xContentBuilder;
+        }
+    }
+
+    public static class AuthcDomain implements ToXContentObject {
+
+        private static String PUBLIC_KEY =
+            "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoqZbjLUAWc+DZTkinQAdvy1GFjPHPnxheU89hSiWoDD3NOW76H3u3T7cCDdOah2msdxSlBmCBH6wik8qLYkcV8owWukQg3PQmbEhrdPaKo0QCgomWs4nLgtmEYqcZ+QQldd82MdTlQ1QmoQmI9Uxqs1SuaKZASp3Gy19y8su5CV+FZ6BruUw9HELK055sAwl3X7j5ouabXGbcib2goBF3P52LkvbJLuWr5HDZEOeSkwIeqSeMojASM96K5SdotD+HwEyjaTjzRPL2Aa1BEQFWOQ6CFJLyLH7ZStDuPM1mJU1VxIVfMbZrhsUBjAnIhRynmWxML7YlNqkP9j6jyOIYQIDAQAB";
+
+        public static final int BASIC_AUTH_DOMAIN_ORDER = 0;
+        public final static AuthcDomain AUTHC_HTTPBASIC_INTERNAL = new TestSecurityConfig.AuthcDomain("basic", BASIC_AUTH_DOMAIN_ORDER)
+            .httpAuthenticatorWithChallenge("basic")
+            .backend("internal");
+
+        public final static AuthcDomain AUTHC_HTTPBASIC_INTERNAL_WITHOUT_CHALLENGE = new TestSecurityConfig.AuthcDomain(
+            "basic",
+            BASIC_AUTH_DOMAIN_ORDER
+        ).httpAuthenticator("basic").backend("internal");
+
+        public final static AuthcDomain DISABLED_AUTHC_HTTPBASIC_INTERNAL = new TestSecurityConfig.AuthcDomain(
+            "basic",
+            BASIC_AUTH_DOMAIN_ORDER,
+            false
+        ).httpAuthenticator("basic").backend("internal");
+
+        public final static AuthcDomain JWT_AUTH_DOMAIN = new TestSecurityConfig.AuthcDomain("jwt", 1).jwtHttpAuthenticator(
+            new JwtConfigBuilder().jwtHeader(AUTHORIZATION).signingKey(PUBLIC_KEY)
+        ).backend("noop");
+
+        private final String id;
+        private boolean enabled = true;
+        private int order;
+        private List<String> skipUsers = new ArrayList<>();
+        private HttpAuthenticator httpAuthenticator;
+        private AuthenticationBackend authenticationBackend;
+
+        public AuthcDomain(String id, int order, boolean enabled) {
+            this.id = id;
+            this.order = order;
+            this.enabled = enabled;
+        }
+
+        public AuthcDomain(String id, int order) {
+            this(id, order, true);
+        }
+
+        public AuthcDomain httpAuthenticator(String type) {
+            this.httpAuthenticator = new HttpAuthenticator(type);
+            return this;
+        }
+
+        public AuthcDomain jwtHttpAuthenticator(JwtConfigBuilder builder) {
+            this.httpAuthenticator = new HttpAuthenticator("jwt").challenge(false).config(builder.build());
+            return this;
+        }
+
+        public AuthcDomain httpAuthenticatorWithChallenge(String type) {
+            this.httpAuthenticator = new HttpAuthenticator(type).challenge(true);
+            return this;
+        }
+
+        public AuthcDomain httpAuthenticator(HttpAuthenticator httpAuthenticator) {
+            this.httpAuthenticator = httpAuthenticator;
+            return this;
+        }
+
+        public AuthcDomain backend(String type) {
+            this.authenticationBackend = new AuthenticationBackend(type);
+            return this;
+        }
+
+        public AuthcDomain backend(AuthenticationBackend authenticationBackend) {
+            this.authenticationBackend = authenticationBackend;
+            return this;
+        }
+
+        public AuthcDomain skipUsers(String... users) {
+            this.skipUsers.addAll(Arrays.asList(users));
+            return this;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+            xContentBuilder.startObject();
+
+            xContentBuilder.field("http_enabled", enabled);
+            xContentBuilder.field("order", order);
+
+            if (httpAuthenticator != null) {
+                xContentBuilder.field("http_authenticator", httpAuthenticator);
+            }
+
+            if (authenticationBackend != null) {
+                xContentBuilder.field("authentication_backend", authenticationBackend);
+            }
+
+            if (skipUsers != null && skipUsers.size() > 0) {
+                xContentBuilder.field("skip_users", skipUsers);
+            }
+
+            xContentBuilder.endObject();
+            return xContentBuilder;
+        }
+
+        public static class HttpAuthenticator implements ToXContentObject {
+            private final String type;
+            private boolean challenge;
+            private Map<String, Object> config = new HashMap();
+
+            public HttpAuthenticator(String type) {
+                this.type = type;
+            }
+
+            public HttpAuthenticator challenge(boolean challenge) {
+                this.challenge = challenge;
+                return this;
+            }
+
+            public HttpAuthenticator config(Map<String, Object> config) {
+                this.config.putAll(config);
+                return this;
+            }
+
+            @Override
+            public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+                xContentBuilder.startObject();
+
+                xContentBuilder.field("type", type);
+                xContentBuilder.field("challenge", challenge);
+                xContentBuilder.field("config", config);
+
+                xContentBuilder.endObject();
+                return xContentBuilder;
+            }
+        }
+
+        public static class AuthenticationBackend implements ToXContentObject {
+            private final String type;
+            private Supplier<Map<String, Object>> config = () -> new HashMap();
+
+            public AuthenticationBackend(String type) {
+                this.type = type;
+            }
+
+            public AuthenticationBackend config(Map<String, Object> config) {
+                Map<String, Object> configCopy = new HashMap<>(config);
+                this.config = () -> configCopy;
+                return this;
+            }
+
+            public AuthenticationBackend config(Supplier<Map<String, Object>> configSupplier) {
+                this.config = configSupplier;
+                return this;
+            }
+
+            @Override
+            public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+                xContentBuilder.startObject();
+
+                xContentBuilder.field("type", type);
+                xContentBuilder.field("config", config.get());
+
+                xContentBuilder.endObject();
+                return xContentBuilder;
+            }
+        }
+    }
+
+    public void initIndex(Client client) {
+        Map<String, Object> settings = new HashMap<>();
+        if (indexName.startsWith(".")) {
+            settings.put("index.hidden", true);
+        }
+        client.admin().indices().create(new CreateIndexRequest(indexName).settings(settings)).actionGet();
+
+        writeSingleEntryConfigToIndex(client, CType.CONFIG, config);
+        if (auditConfiguration != null) {
+            writeSingleEntryConfigToIndex(client, CType.AUDIT, "config", auditConfiguration);
+        }
+        writeConfigToIndex(client, CType.ROLES, roles);
+        writeConfigToIndex(client, CType.INTERNALUSERS, internalUsers);
+        writeConfigToIndex(client, CType.ROLESMAPPING, rolesMapping);
+        writeEmptyConfigToIndex(client, CType.ACTIONGROUPS);
+        writeEmptyConfigToIndex(client, CType.TENANTS);
+    }
+
+    public void updateInternalUsersConfiguration(Client client, List<User> users) {
+        Map<String, ToXContentObject> userMap = new HashMap<>();
+        for (User user : users) {
+            userMap.put(user.getName(), user);
+        }
+        updateConfigInIndex(client, CType.INTERNALUSERS, userMap);
+    }
+
+    static String hash(final char[] clearTextPassword) {
+        final byte[] salt = new byte[16];
+        new SecureRandom().nextBytes(salt);
+        final String hash = OpenBSDBCrypt.generate((Objects.requireNonNull(clearTextPassword)), salt, 12);
+        Arrays.fill(salt, (byte) 0);
+        Arrays.fill(clearTextPassword, '\0');
+        return hash;
+    }
+
+    private void writeEmptyConfigToIndex(Client client, CType configType) {
+        writeConfigToIndex(client, configType, Collections.emptyMap());
+    }
+
+    private void writeConfigToIndex(Client client, CType configType, Map<String, ? extends ToXContentObject> config) {
+        try {
+            String json = configToJson(configType, config);
+
+            log.info("Writing security configuration into index " + configType + ":\n" + json);
+
+            BytesReference bytesReference = toByteReference(json);
+            client.index(
+                new IndexRequest(indexName).id(configType.toLCString())
+                    .setRefreshPolicy(IMMEDIATE)
+                    .source(configType.toLCString(), bytesReference)
+            ).actionGet();
+        } catch (Exception e) {
+            throw new RuntimeException("Error while initializing config for " + indexName, e);
+        }
+    }
+
+    private static BytesReference toByteReference(String string) throws UnsupportedEncodingException {
+        return BytesReference.fromByteBuffer(ByteBuffer.wrap(string.getBytes("utf-8")));
+    }
+
+    private void updateConfigInIndex(Client client, CType configType, Map<String, ? extends ToXContentObject> config) {
+        try {
+            String json = configToJson(configType, config);
+            BytesReference bytesReference = toByteReference(json);
+            log.info("Update configuration of type '{}' in index '{}', new value '{}'.", configType, indexName, json);
+            UpdateRequest upsert = new UpdateRequest(indexName, configType.toLCString()).doc(configType.toLCString(), bytesReference)
+                .setRefreshPolicy(IMMEDIATE);
+            client.update(upsert).actionGet();
+        } catch (Exception e) {
+            throw new RuntimeException("Error while updating config for " + indexName, e);
+        }
+    }
+
+    private static String configToJson(CType configType, Map<String, ? extends ToXContentObject> config) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+
+        builder.startObject();
+        builder.startObject("_meta");
+        builder.field("type", configType.toLCString());
+        builder.field("config_version", 2);
+        builder.endObject();
+
+        for (Map.Entry<String, ? extends ToXContentObject> entry : config.entrySet()) {
+            builder.field(entry.getKey(), entry.getValue());
+        }
+
+        builder.endObject();
+
+        return builder.toString();
+    }
+
+    private void writeSingleEntryConfigToIndex(Client client, CType configType, ToXContentObject config) {
+        writeSingleEntryConfigToIndex(client, configType, configType.toLCString(), config);
+    }
+
+    private void writeSingleEntryConfigToIndex(Client client, CType configType, String configurationRoot, ToXContentObject config) {
+        try {
+            XContentBuilder builder = XContentFactory.jsonBuilder();
+
+            builder.startObject();
+            builder.startObject("_meta");
+            builder.field("type", configType.toLCString());
+            builder.field("config_version", 2);
+            builder.endObject();
+
+            builder.field(configurationRoot, config);
+
+            builder.endObject();
+
+            String json = builder.toString();
+
+            log.info("Writing security plugin configuration into index " + configType + ":\n" + json);
+
+            client.index(
+                new IndexRequest(indexName).id(configType.toLCString())
+                    .setRefreshPolicy(IMMEDIATE)
+                    .source(configType.toLCString(), toByteReference(json))
+            ).actionGet();
+        } catch (Exception e) {
+            throw new RuntimeException("Error while initializing config for " + indexName, e);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/XffConfig.java b/src/integrationTest/java/org/opensearch/test/framework/XffConfig.java
new file mode 100644
index 0000000000..b1c10bfd73
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/XffConfig.java
@@ -0,0 +1,82 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework;
+
+import java.io.IOException;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+/**
+* <p>
+* XFF is an abbreviation of <code>X-Forwarded-For</code>. X-Forwarded-For is an HTTP header which contains client source IP address
+* and additionally IP addresses of proxies which forward the request.
+* The X-Forwarded-For header is used by HTTP authentication of type
+* </p>
+* <ol>
+*     <li><code>proxy</code> defined by class {@link org.opensearch.security.http.HTTPProxyAuthenticator}</li>
+*     <li><code>extended-proxy</code> defined by the class {@link org.opensearch.security.http.proxy.HTTPExtendedProxyAuthenticator}</li>
+* </ol>
+*
+* <p>
+* The above authenticators use the X-Forwarded-For to determine if an HTTP request comes from trusted proxies. The trusted proxies
+* are defined by a regular expression {@link #internalProxiesRegexp}. The proxy authentication can be applied only to HTTP requests
+* which were forwarded by trusted HTTP proxies.
+* </p>
+*
+*<p>
+*     The class can be serialized to JSON and then stored in an OpenSearch index which contains security plugin configuration.
+*</p>
+*/
+public class XffConfig implements ToXContentObject {
+
+    private final boolean enabled;
+
+    /**
+    * Regular expression used to determine if HTTP proxy is trusted or not. IP address of trusted proxies must match the regular
+    * expression defined by the below field.
+    */
+    private String internalProxiesRegexp;
+
+    private String remoteIpHeader;
+
+    public XffConfig(boolean enabled) {
+        this.enabled = enabled;
+    }
+
+    /**
+    * Builder-like method used to set value of the field {@link #internalProxiesRegexp}
+    * @param internalProxiesRegexp regular expression which matches IP address of a HTTP proxies if the proxies are trusted.
+    * @return builder
+    */
+    public XffConfig internalProxiesRegexp(String internalProxiesRegexp) {
+        this.internalProxiesRegexp = internalProxiesRegexp;
+        return this;
+    }
+
+    public XffConfig remoteIpHeader(String remoteIpHeader) {
+        this.remoteIpHeader = remoteIpHeader;
+        return this;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.startObject();
+        xContentBuilder.field("enabled", enabled);
+        xContentBuilder.field("internalProxies", internalProxiesRegexp);
+        if (StringUtils.isNoneBlank(remoteIpHeader)) {
+            xContentBuilder.field("remoteIpHeader", remoteIpHeader);
+        }
+        xContentBuilder.endObject();
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/audit/AuditLogsRule.java b/src/integrationTest/java/org/opensearch/test/framework/audit/AuditLogsRule.java
new file mode 100644
index 0000000000..3d13d731eb
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/audit/AuditLogsRule.java
@@ -0,0 +1,137 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.audit;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.awaitility.Awaitility;
+import org.awaitility.core.ConditionTimeoutException;
+import org.hamcrest.Matcher;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
+
+import org.opensearch.security.auditlog.impl.AuditMessage;
+
+import static java.util.Collections.synchronizedList;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.opensearch.test.framework.matcher.AuditMessageMatchers.atLeastCertainNumberOfAuditsFulfillPredicate;
+import static org.opensearch.test.framework.matcher.AuditMessageMatchers.exactNumberOfAuditsFulfillPredicate;
+
+public class AuditLogsRule implements TestRule {
+
+    private static final Logger log = LogManager.getLogger(AuditLogsRule.class);
+
+    private List<AuditMessage> currentTestAuditMessages;
+
+    public List<AuditMessage> getCurrentTestAuditMessages() {
+        return currentTestAuditMessages;
+    }
+
+    public void waitForAuditLogs() {
+        try {
+            TimeUnit.SECONDS.sleep(3);
+            afterWaitingForAuditLogs();
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Waiting for audit logs interrupted.", e);
+        }
+    }
+
+    private void afterWaitingForAuditLogs() {
+        if (log.isDebugEnabled()) {
+            log.debug("Audit records captured during test:\n{}", auditMessagesToString(currentTestAuditMessages));
+        }
+    }
+
+    public void assertExactlyOne(Predicate<AuditMessage> predicate) {
+        assertExactly(1, predicate);
+    }
+
+    public void assertAuditLogsCount(int from, int to) {
+        int actualCount = currentTestAuditMessages.size();
+        String message = "Expected audit log count is between " + from + " and " + to + " but was " + actualCount;
+        assertThat(message, actualCount, allOf(greaterThanOrEqualTo(from), lessThanOrEqualTo(to)));
+    }
+
+    public void assertExactly(long expectedNumberOfAuditMessages, Predicate<AuditMessage> predicate) {
+        assertExactly(exactNumberOfAuditsFulfillPredicate(expectedNumberOfAuditMessages, predicate));
+    }
+
+    private void assertExactly(Matcher<List<AuditMessage>> matcher) {
+        // pollDelay - initial delay before first evaluation
+        Awaitility.await("Await for audit logs")
+            .atMost(3, TimeUnit.SECONDS)
+            .pollDelay(0, TimeUnit.MICROSECONDS)
+            .until(() -> new ArrayList<>(currentTestAuditMessages), matcher);
+    }
+
+    public void assertAtLeast(long minCount, Predicate<AuditMessage> predicate) {
+        assertExactly(atLeastCertainNumberOfAuditsFulfillPredicate(minCount, predicate));
+    }
+
+    private static String auditMessagesToString(List<AuditMessage> audits) {
+        return audits.stream().map(AuditMessage::toString).collect(Collectors.joining(",\n"));
+    }
+
+    @Override
+    public Statement apply(Statement statement, Description description) {
+        return new Statement() {
+            @Override
+            public void evaluate() throws Throwable {
+                String methodName = description.getMethodName();
+                beforeTest(methodName);
+                try {
+                    statement.evaluate();
+                } catch (ConditionTimeoutException ex) {
+                    whenTimeoutOccurs(methodName);
+                    throw ex;
+                } finally {
+                    afterTest();
+                }
+            }
+        };
+    }
+
+    private void whenTimeoutOccurs(String methodName) {
+        List<AuditMessage> copy = new ArrayList<>(currentTestAuditMessages);
+        String auditMessages = auditMessagesToString(copy);
+        log.error(
+            "Timeout occured due to insufficient number ('{}') of captured audit messages during test '{}'\n{}",
+            copy.size(),
+            methodName,
+            auditMessages
+        );
+    }
+
+    private void afterTest() {
+        TestRuleAuditLogSink.unregisterListener();
+        this.currentTestAuditMessages = null;
+    }
+
+    private void beforeTest(String methodName) {
+        log.info("Start collecting audit logs before test {}", methodName);
+        this.currentTestAuditMessages = synchronizedList(new ArrayList<>());
+        TestRuleAuditLogSink.registerListener(this);
+    }
+
+    public void onAuditMessage(AuditMessage auditMessage) {
+        currentTestAuditMessages.add(auditMessage);
+        log.debug("New audit message received '{}', total number of audit messages '{}'.", auditMessage, currentTestAuditMessages.size());
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/audit/AuditMessagePredicate.java b/src/integrationTest/java/org/opensearch/test/framework/audit/AuditMessagePredicate.java
new file mode 100644
index 0000000000..4935bf0387
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/audit/AuditMessagePredicate.java
@@ -0,0 +1,278 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.audit;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import org.opensearch.rest.RestRequest.Method;
+import org.opensearch.security.auditlog.AuditLog.Origin;
+import org.opensearch.security.auditlog.impl.AuditCategory;
+import org.opensearch.security.auditlog.impl.AuditMessage;
+import org.opensearch.test.framework.TestSecurityConfig.User;
+
+import static org.opensearch.security.auditlog.impl.AuditCategory.AUTHENTICATED;
+import static org.opensearch.security.auditlog.impl.AuditCategory.GRANTED_PRIVILEGES;
+import static org.opensearch.security.auditlog.impl.AuditCategory.MISSING_PRIVILEGES;
+import static org.opensearch.security.auditlog.impl.AuditMessage.REQUEST_LAYER;
+import static org.opensearch.security.auditlog.impl.AuditMessage.RESOLVED_INDICES;
+import static org.opensearch.security.auditlog.impl.AuditMessage.REST_REQUEST_PATH;
+
+public class AuditMessagePredicate implements Predicate<AuditMessage> {
+
+    private final AuditCategory category;
+    private final Origin requestLayer;
+    private final String restRequestPath;
+    private final String initiatingUser;
+    private final Method requestMethod;
+    private final String transportRequestType;
+    private final String effectiveUser;
+    private final String index;
+    private final String privilege;
+
+    private AuditMessagePredicate(
+        AuditCategory category,
+        Origin requestLayer,
+        String restRequestPath,
+        String initiatingUser,
+        Method requestMethod,
+        String transportRequestType,
+        String effectiveUser,
+        String index,
+        String privilege
+    ) {
+        this.category = category;
+        this.requestLayer = requestLayer;
+        this.restRequestPath = restRequestPath;
+        this.initiatingUser = initiatingUser;
+        this.requestMethod = requestMethod;
+        this.transportRequestType = transportRequestType;
+        this.effectiveUser = effectiveUser;
+        this.index = index;
+        this.privilege = privilege;
+    }
+
+    private AuditMessagePredicate(AuditCategory category) {
+        this(category, null, null, null, null, null, null, null, null);
+    }
+
+    public static AuditMessagePredicate auditPredicate(AuditCategory category) {
+        return new AuditMessagePredicate(category);
+    }
+
+    public static AuditMessagePredicate userAuthenticated(User user) {
+        return auditPredicate(AUTHENTICATED).withInitiatingUser(user);
+    }
+
+    public static AuditMessagePredicate grantedPrivilege(User user, String requestType) {
+        return auditPredicate(GRANTED_PRIVILEGES).withLayer(Origin.TRANSPORT).withEffectiveUser(user).withTransportRequestType(requestType);
+    }
+
+    public static AuditMessagePredicate missingPrivilege(User user, String requestType) {
+        return auditPredicate(MISSING_PRIVILEGES).withLayer(Origin.TRANSPORT).withEffectiveUser(user).withTransportRequestType(requestType);
+    }
+
+    public static AuditMessagePredicate privilegePredicateTransportLayer(
+        AuditCategory category,
+        User user,
+        String requestType,
+        String privilege
+    ) {
+        return auditPredicate(category).withLayer(Origin.TRANSPORT)
+            .withEffectiveUser(user)
+            .withPrivilege(privilege)
+            .withTransportRequestType(requestType);
+    }
+
+    public static AuditMessagePredicate privilegePredicateRESTLayer(AuditCategory category, User user, Method method, String endpoint) {
+        return auditPredicate(category).withLayer(Origin.REST).withEffectiveUser(user).withRestRequest(method, endpoint);
+    }
+
+    public static AuditMessagePredicate userAuthenticatedPredicate(User user, Method method, String endpoint) {
+        return userAuthenticated(user).withLayer(Origin.REST).withRestRequest(method, endpoint).withInitiatingUser(user);
+    }
+
+    public AuditMessagePredicate withLayer(Origin layer) {
+        return new AuditMessagePredicate(
+            category,
+            layer,
+            restRequestPath,
+            initiatingUser,
+            requestMethod,
+            transportRequestType,
+            effectiveUser,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withRequestPath(String path) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            path,
+            initiatingUser,
+            requestMethod,
+            transportRequestType,
+            effectiveUser,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withInitiatingUser(String user) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            user,
+            requestMethod,
+            transportRequestType,
+            effectiveUser,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withInitiatingUser(User user) {
+        return withInitiatingUser(user.getName());
+    }
+
+    public AuditMessagePredicate withRestMethod(Method method) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            initiatingUser,
+            method,
+            transportRequestType,
+            effectiveUser,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withTransportRequestType(String type) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            initiatingUser,
+            requestMethod,
+            type,
+            effectiveUser,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withEffectiveUser(String user) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            initiatingUser,
+            requestMethod,
+            transportRequestType,
+            user,
+            index,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withEffectiveUser(User user) {
+        return withEffectiveUser(user.getName());
+    }
+
+    public AuditMessagePredicate withRestRequest(Method method, String path) {
+        return this.withLayer(Origin.REST).withRestMethod(method).withRequestPath(path);
+    }
+
+    public AuditMessagePredicate withIndex(String indexName) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            initiatingUser,
+            requestMethod,
+            transportRequestType,
+            effectiveUser,
+            indexName,
+            privilege
+        );
+    }
+
+    public AuditMessagePredicate withPrivilege(String privilegeAction) {
+        return new AuditMessagePredicate(
+            category,
+            requestLayer,
+            restRequestPath,
+            initiatingUser,
+            requestMethod,
+            transportRequestType,
+            effectiveUser,
+            index,
+            privilegeAction
+        );
+    }
+
+    @Override
+    public boolean test(AuditMessage auditMessage) {
+        List<Predicate<AuditMessage>> predicates = new ArrayList<>();
+        predicates.add(audit -> Objects.isNull(category) || category.equals(audit.getCategory()));
+        predicates.add(audit -> Objects.isNull(requestLayer) || requestLayer.equals(audit.getAsMap().get(REQUEST_LAYER)));
+        predicates.add(audit -> Objects.isNull(restRequestPath) || restRequestPath.equals(audit.getAsMap().get(REST_REQUEST_PATH)));
+        predicates.add(audit -> Objects.isNull(initiatingUser) || initiatingUser.equals(audit.getInitiatingUser()));
+        predicates.add(audit -> Objects.isNull(requestMethod) || requestMethod.equals(audit.getRequestMethod()));
+        predicates.add(audit -> Objects.isNull(transportRequestType) || transportRequestType.equals(audit.getRequestType()));
+        predicates.add(audit -> Objects.isNull(effectiveUser) || effectiveUser.equals(audit.getEffectiveUser()));
+        predicates.add(audit -> Objects.isNull(index) || containIndex(audit, index));
+        predicates.add(audit -> Objects.isNull(privilege) || privilege.equals(audit.getPrivilege()));
+        return predicates.stream().reduce(Predicate::and).orElseThrow().test(auditMessage);
+    }
+
+    private boolean containIndex(AuditMessage auditMessage, String indexName) {
+        Map<String, Object> audit = auditMessage.getAsMap();
+        return Optional.ofNullable(audit.get(RESOLVED_INDICES))
+            .filter(String[].class::isInstance)
+            .map(String[].class::cast)
+            .stream()
+            .flatMap(Arrays::stream)
+            .collect(Collectors.toSet())
+            .contains(indexName);
+    }
+
+    @Override
+    public String toString() {
+        return "AuditMessagePredicate{"
+            + "category="
+            + category
+            + ", requestLayer="
+            + requestLayer
+            + ", restRequestPath='"
+            + restRequestPath
+            + '\''
+            + ", requestInitiatingUser='"
+            + initiatingUser
+            + '\''
+            + ", requestMethod="
+            + requestMethod
+            + ", transportRequestType='"
+            + transportRequestType
+            + '\''
+            + '}';
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/audit/TestRuleAuditLogSink.java b/src/integrationTest/java/org/opensearch/test/framework/audit/TestRuleAuditLogSink.java
new file mode 100644
index 0000000000..c73d57d23c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/audit/TestRuleAuditLogSink.java
@@ -0,0 +1,51 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.audit;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.security.auditlog.impl.AuditMessage;
+import org.opensearch.security.auditlog.sink.AuditLogSink;
+
+public class TestRuleAuditLogSink extends AuditLogSink {
+    private static final Logger log = LogManager.getLogger(TestRuleAuditLogSink.class);
+
+    private static volatile AuditLogsRule listener;
+
+    public TestRuleAuditLogSink(String name, Settings settings, String settingsPrefix, AuditLogSink fallbackSink) {
+        super(name, settings, settingsPrefix, fallbackSink);
+        log.info("Test rule audit log sink created");
+    }
+
+    @Override
+    protected boolean doStore(AuditMessage auditMessage) {
+        log.debug("New audit message received '{}'.", auditMessage);
+        AuditLogsRule currentListener = listener;
+        if (currentListener != null) {
+            currentListener.onAuditMessage(auditMessage);
+        }
+        return true;
+    }
+
+    public static void registerListener(AuditLogsRule auditLogsRule) {
+        listener = auditLogsRule;
+    }
+
+    public static void unregisterListener() {
+        listener = null;
+    }
+
+    @Override
+    public boolean isHandlingBackpressure() {
+        return true;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/AlgorithmKit.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/AlgorithmKit.java
new file mode 100644
index 0000000000..60ae56410c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/AlgorithmKit.java
@@ -0,0 +1,147 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+package org.opensearch.test.framework.certificate;
+
+import java.security.InvalidAlgorithmParameterException;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.Provider;
+import java.security.spec.ECGenParameterSpec;
+import java.util.function.Supplier;
+
+import com.google.common.base.Strings;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+* The class determines cryptographic algorithms used for certificate creation. To create certificate it is necessary to generate public
+* and private key, so-called key pair. The class encapsulates the process of key pairs creation ({@link #generateKeyPair()}),
+* thus determines algorithm used for key pair creation. Additionally, class defines also algorithms used to digitally sign a certificate.
+* Please see {@link #getSignatureAlgorithmName()}
+*/
+class AlgorithmKit {
+
+    private static final Logger log = LogManager.getLogger(AlgorithmKit.class);
+    public static final String SIGNATURE_ALGORITHM_SHA_256_WITH_RSA = "SHA256withRSA";
+    public static final String SIGNATURE_ALGORITHM_SHA_256_WITH_ECDSA = "SHA256withECDSA";
+
+    private final String signatureAlgorithmName;
+    private final Supplier<KeyPair> keyPairSupplier;
+
+    private AlgorithmKit(String signatureAlgorithmName, Supplier<KeyPair> keyPairSupplier) {
+        notEmptyAlgorithmName(signatureAlgorithmName);
+        this.signatureAlgorithmName = signatureAlgorithmName;
+        this.keyPairSupplier = requireNonNull(keyPairSupplier, "Key pair supplier is required.");
+    }
+
+    private static void notEmptyAlgorithmName(String signatureAlgorithmName) {
+        if (Strings.isNullOrEmpty(signatureAlgorithmName)) {
+            throw new RuntimeException("Algorithm name is required.");
+        }
+    }
+
+    /**
+    * Static factory method. ECDSA algorithm used for key pair creation. Signature algorithm is defined by field
+    * {@link #SIGNATURE_ALGORITHM_SHA_256_WITH_ECDSA}
+    *
+    * @param securityProvider determines cryptographic algorithm implementation
+    * @param ellipticCurve
+    * @return new instance of class {@link AlgorithmKit}
+    */
+    public static AlgorithmKit ecdsaSha256withEcdsa(Provider securityProvider, String ellipticCurve) {
+        notEmptyAlgorithmName(ellipticCurve);
+        Supplier<KeyPair> supplier = ecdsaKeyPairSupplier(requireNonNull(securityProvider, "Security provider is required"), ellipticCurve);
+        return new AlgorithmKit(SIGNATURE_ALGORITHM_SHA_256_WITH_ECDSA, supplier);
+    }
+
+    /**
+    * Static factory method. It creates object of {@link AlgorithmKit} which enforces usage of RSA algorithm for key pair generation.
+    * Signature algorithm is defined by {@link #SIGNATURE_ALGORITHM_SHA_256_WITH_RSA}
+    *
+    * @param securityProvider determines cryptographic algorithm implementation
+    * @param keySize defines key size for RSA algorithm
+    * @return new instance of class {@link AlgorithmKit}
+    */
+    public static AlgorithmKit rsaSha256withRsa(Provider securityProvider, int keySize) {
+        positiveKeySize(keySize);
+        Supplier<KeyPair> supplier = rsaKeyPairSupplier(securityProvider, keySize);
+        return new AlgorithmKit(SIGNATURE_ALGORITHM_SHA_256_WITH_RSA, supplier);
+    }
+
+    private static void positiveKeySize(int keySize) {
+        if (keySize <= 0) {
+            throw new RuntimeException("Key size must be a positive integer value, provided: " + keySize);
+        }
+    }
+
+    /**
+    * It determines algorithm used for digital signature
+    * @return algorithm name
+    */
+    public String getSignatureAlgorithmName() {
+        return signatureAlgorithmName;
+    }
+
+    /**
+    * It creates new private and public key pair
+    * @return new pair of keys
+    */
+    public KeyPair generateKeyPair() {
+        return keyPairSupplier.get();
+    }
+
+    private static Supplier<KeyPair> rsaKeyPairSupplier(Provider securityProvider, int keySize) {
+        try {
+            KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA", securityProvider);
+            log.info("Initialize key pair generator with keySize: {}", keySize);
+            generator.initialize(keySize);
+            return generator::generateKeyPair;
+        } catch (NoSuchAlgorithmException e) {
+            String message = "Error while initializing RSA asymmetric key generator.";
+            log.error(message, e);
+            throw new RuntimeException(message, e);
+        }
+    }
+
+    private static Supplier<KeyPair> ecdsaKeyPairSupplier(Provider securityProvider, String ellipticCurve) {
+        try {
+            KeyPairGenerator generator = KeyPairGenerator.getInstance("EC", securityProvider);
+            log.info("Initialize key pair generator with elliptic curve: {}", ellipticCurve);
+            ECGenParameterSpec ecsp = new ECGenParameterSpec(ellipticCurve);
+            generator.initialize(ecsp);
+            return generator::generateKeyPair;
+        } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException e) {
+            String message = "Error while initializing ECDSA asymmetric key generator.";
+            log.error(message, e);
+            throw new RuntimeException(message, e);
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateData.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateData.java
new file mode 100644
index 0000000000..09d0f931e6
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateData.java
@@ -0,0 +1,89 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+package org.opensearch.test.framework.certificate;
+
+import java.security.Key;
+import java.security.KeyPair;
+import java.security.cert.CertificateException;
+import java.security.cert.X509Certificate;
+
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
+
+/**
+* The class contains all data related to Certificate including private key which is considered to be a secret.
+*/
+public class CertificateData {
+
+    private final X509CertificateHolder certificate;
+    private final KeyPair keyPair;
+
+    public CertificateData(X509CertificateHolder certificate, KeyPair keyPair) {
+        this.certificate = certificate;
+        this.keyPair = keyPair;
+    }
+
+    /**
+    * The method returns X.509 certificate encoded in PEM format. PEM format is defined by
+    * <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>.
+    * @return Certificate in PEM format
+    */
+    public String certificateInPemFormat() {
+        return PemConverter.toPem(certificate);
+    }
+
+    public X509Certificate certificate() {
+        try {
+            return new JcaX509CertificateConverter().getCertificate(certificate);
+        } catch (CertificateException e) {
+            throw new RuntimeException("Cannot retrieve certificate", e);
+        }
+    }
+
+    /**
+    * It returns the private key associated with certificate encoded in PEM format. PEM format is defined by
+    * <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>.
+    * @param privateKeyPassword password used for private key encryption. <code>null</code> for unencrypted key.
+    * @return private key encoded in PEM format
+    */
+    public String privateKeyInPemFormat(String privateKeyPassword) {
+        return PemConverter.toPem(keyPair.getPrivate(), privateKeyPassword);
+    }
+
+    X500Name getCertificateSubject() {
+        return certificate.getSubject();
+    }
+
+    KeyPair getKeyPair() {
+        return keyPair;
+    }
+
+    public Key getKey() {
+        return keyPair.getPrivate();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateMetadata.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateMetadata.java
new file mode 100644
index 0000000000..cc94621f72
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificateMetadata.java
@@ -0,0 +1,220 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.certificate;
+
+// CS-SUPPRESS-SINGLE: RegexpSingleline Extension is used to refer to certificate extensions, keeping this rule disable for the whole file
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import com.google.common.base.Strings;
+import org.bouncycastle.asn1.ASN1Encodable;
+import org.bouncycastle.asn1.DERSequence;
+import org.bouncycastle.asn1.x509.ExtendedKeyUsage;
+import org.bouncycastle.asn1.x509.GeneralName;
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+
+import static java.util.Arrays.asList;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptySet;
+import static java.util.Objects.requireNonNull;
+
+/**
+* <p>
+* The class represents metadata which should be embedded in certificate to describe a certificate subject (person, company, web server,
+* IoT device). The class contains some basic metadata and metadata which should be placed in certificate extensions.
+* </p>
+*
+* <p>
+*     The class is immutable.
+* </p>
+*
+*/
+class CertificateMetadata {
+    /**
+    * Certification subject (person, company, web server, IoT device). The subject of certificate is an owner of the certificate
+    * (simplification). The format of this field must adhere to RFC 4514.
+    * @see <a href="https://www.baeldung.com/javadoc-linking-external-url">RFC 4514</a>
+    */
+    private final String subject;
+
+    /**
+    * It describes certificate expiration date
+    */
+    private final int validityDays;
+
+    /**
+    * Optionally used by Open Search to indicate that the certificate can be used by Open Search node to confirm the node identity. The
+    * value becomes a part of
+    * <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.6">SAN (Subject Alternative Name) extension</a>
+    *
+    * @see #dnsNames
+    * @see <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.6">SAN (Subject Alternative Name) extension</a>
+    */
+    private final String nodeOid;
+
+    /**
+    * The certificate contains only one {@link #subject}. This is a common limitation when a certificate is used by a web server which is
+    * associated with a few domains. To overcome this limitation SAN (Subject Alternative Name) extension was introduced.
+    * The field contains additional subject names which enables creation of so called multi-domain certificates. The extension is defined
+    * in section <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.6">4.2.1.6 of RFC 5280</a>
+    *
+    * @see <a href="https://www.rfc-editor.org/rfc/rfc5280.html">RFC 5280</a>
+    */
+    private final List<String> dnsNames;
+
+    /**
+    * Similar to {@link #dnsNames} but contains IP addresses instead of domains.
+    */
+    private final List<String> ipAddresses;
+
+    /**
+    * If a private key associated with certificate is used to sign other certificate then this field has to be <code>true</code>.
+    */
+    private final boolean basicConstrainIsCa;
+
+    /**
+    * Allowed usages for public key associated with certificate
+    */
+    private final Set<PublicKeyUsage> keyUsages;
+
+    private CertificateMetadata(
+        String subject,
+        int validityDays,
+        String nodeOid,
+        List<String> dnsNames,
+        List<String> ipAddresses,
+        boolean basicConstrainIsCa,
+        Set<PublicKeyUsage> keyUsages
+    ) {
+        this.subject = subject;
+        this.validityDays = validityDays;
+        this.nodeOid = nodeOid;
+        this.dnsNames = requireNonNull(dnsNames, "List of dns names must not be null.");
+        this.ipAddresses = requireNonNull(ipAddresses, "List of IP addresses must not be null");
+        this.basicConstrainIsCa = basicConstrainIsCa;
+        this.keyUsages = requireNonNull(keyUsages, "Key usage set must not be null.");
+    }
+
+    /**
+    * Static factory method. It creates metadata which contains only basic information.
+    * @param subjectName please see {@link #subject}
+    * @param validityDays please see {@link #validityDays}
+    * @return new instance of {@link CertificateMetadata}
+    */
+    public static CertificateMetadata basicMetadata(String subjectName, int validityDays) {
+        return new CertificateMetadata(subjectName, validityDays, null, emptyList(), emptyList(), false, emptySet());
+    }
+
+    /**
+    * It is related to private key associated with certificate. It specifies metadata related to allowed private key usage.
+    * @param basicConstrainIsCa {@link #basicConstrainIsCa}
+    * @param keyUsages {@link #keyUsages}
+    * @return returns newly created instance of {@link CertificateData}
+    */
+    public CertificateMetadata withKeyUsage(boolean basicConstrainIsCa, PublicKeyUsage... keyUsages) {
+        Set<PublicKeyUsage> usages = arrayToEnumSet(keyUsages);
+        return new CertificateMetadata(subject, validityDays, nodeOid, dnsNames, ipAddresses, basicConstrainIsCa, usages);
+    }
+
+    private <T extends Enum<T>> Set<T> arrayToEnumSet(T[] enumArray) {
+        if ((enumArray == null) || (enumArray.length == 0)) {
+            return Collections.emptySet();
+        }
+        return EnumSet.copyOf(asList(enumArray));
+    }
+
+    /**
+    * The method defines metadata related to SAN (Subject Alternative Name) extension.
+    * @param nodeOid {@link #nodeOid}
+    * @param dnsNames {@link #dnsNames}
+    * @param ipAddresses {@link #ipAddresses}
+    * @return new instance of {@link CertificateMetadata}
+    * @see <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.6">SAN (Subject Alternative Name) extension</a>
+    */
+    public CertificateMetadata withSubjectAlternativeName(String nodeOid, List<String> dnsNames, String... ipAddresses) {
+        return new CertificateMetadata(subject, validityDays, nodeOid, dnsNames, asList(ipAddresses), basicConstrainIsCa, keyUsages);
+    }
+
+    /**
+    * {@link #subject}
+    * @return Subject name
+    */
+    public String getSubject() {
+        return subject;
+    }
+
+    /**
+    * {@link #validityDays}
+    * @return determines certificate expiration date
+    */
+    public int getValidityDays() {
+        return validityDays;
+    }
+
+    /**
+    * {@link #basicConstrainIsCa}
+    * @return Determines if another certificate can be derived from certificate.
+    */
+    public boolean isBasicConstrainIsCa() {
+        return basicConstrainIsCa;
+    }
+
+    KeyUsage asKeyUsage() {
+        Integer keyUsageBitMask = keyUsages.stream()
+            .filter(PublicKeyUsage::isNotExtendedUsage)
+            .map(PublicKeyUsage::asInt)
+            .reduce(0, (accumulator, currentValue) -> accumulator | currentValue);
+        return new KeyUsage(keyUsageBitMask);
+    }
+
+    boolean hasSubjectAlternativeNameExtension() {
+        return ((ipAddresses.size() + dnsNames.size()) > 0) || (Strings.isNullOrEmpty(nodeOid) == false);
+    }
+
+    DERSequence createSubjectAlternativeNames() {
+        List<ASN1Encodable> subjectAlternativeNameList = new ArrayList<>();
+        if (!Strings.isNullOrEmpty(nodeOid)) {
+            subjectAlternativeNameList.add(new GeneralName(GeneralName.registeredID, nodeOid));
+        }
+        if (isNotEmpty(dnsNames)) {
+            for (String dnsName : dnsNames) {
+                subjectAlternativeNameList.add(new GeneralName(GeneralName.dNSName, dnsName));
+            }
+        }
+        if (isNotEmpty(ipAddresses)) {
+            for (String ip : ipAddresses) {
+                subjectAlternativeNameList.add(new GeneralName(GeneralName.iPAddress, ip));
+            }
+        }
+        return new DERSequence(subjectAlternativeNameList.toArray(ASN1Encodable[]::new));
+    }
+
+    private static <T> boolean isNotEmpty(Collection<T> collection) {
+        return (collection != null) && (!collection.isEmpty());
+    }
+
+    boolean hasExtendedKeyUsage() {
+        return keyUsages.stream().anyMatch(PublicKeyUsage::isNotExtendedUsage);
+    }
+
+    ExtendedKeyUsage getExtendedKeyUsage() {
+        KeyPurposeId[] usages = keyUsages.stream()
+            .filter(PublicKeyUsage::isExtendedUsage)
+            .map(PublicKeyUsage::getKeyPurposeId)
+            .toArray(KeyPurposeId[]::new);
+        return new ExtendedKeyUsage(usages);
+    }
+}
+// CS-ENFORCE-SINGLE
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuer.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuer.java
new file mode 100644
index 0000000000..6facf5f2ac
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuer.java
@@ -0,0 +1,241 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+package org.opensearch.test.framework.certificate;
+
+// CS-SUPPRESS-SINGLE: RegexpSingleline Extension is used to refer to certificate extensions, keeping this rule disable for the whole file
+import java.math.BigInteger;
+import java.security.KeyPair;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.Provider;
+import java.security.PublicKey;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.concurrent.atomic.AtomicLong;
+
+import com.google.common.base.Strings;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.bouncycastle.asn1.DERSequence;
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.asn1.x500.style.RFC4519Style;
+import org.bouncycastle.asn1.x509.BasicConstraints;
+import org.bouncycastle.asn1.x509.Extension;
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
+import org.bouncycastle.cert.CertIOException;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.cert.X509v3CertificateBuilder;
+import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+* <p>
+* The class is used to generate public key certificate. The class hides low level details related to certificate creation and
+* usage of underlying Bouncy Castle library.
+* </p>
+* <p>
+* The public key certificate according to its name contains a public key and some metadata. The metadata describes an entity (human,
+* company, web server, IoT device, etc.) which is an owner of private key associated with the certificate (private key is not included
+* into certificate and is a kind of secret). The responsibility of the class is to issue a certificate. To issue a certificate it is
+* necessary to provide metadata which is embedded in the certificates. The metadata is represented by the class
+* {@link CertificateMetadata}. Furthermore, the class needs a public key which also must be embedded in the certificate. To obtain public
+* and private key pair the class uses {@link AlgorithmKit}. The result of creating certificate is data structure {@link CertificateData}.
+* The class {@link CertificateData} contains entire information which is necessary to use the certificate by its owner, that is:
+* certificate and private key.
+* </p>
+*
+* <p>
+*     The class is able to create self-signed certificates or certificates signed by some entity. To create a self signed certificate
+*     the method {@link #issueSignedCertificate(CertificateMetadata, CertificateData)} is used, whereas to create signed certificates
+*     the method {@link #issueSignedCertificate(CertificateMetadata, CertificateData)} is employed.
+* </p>
+* <p>
+*     The instance of the class can be obtained by invocation of static method defined in class {@link CertificatesIssuerFactory}.
+* </p>
+*/
+class CertificatesIssuer {
+
+    private static final Logger log = LogManager.getLogger(CertificatesIssuer.class);
+
+    private static final AtomicLong ID_COUNTER = new AtomicLong(System.currentTimeMillis());
+
+    private final Provider securityProvider;
+    private final AlgorithmKit algorithmKit;
+    private final JcaX509ExtensionUtils extUtils;
+
+    CertificatesIssuer(Provider securityProvider, AlgorithmKit algorithmKit) {
+        this.securityProvider = securityProvider;
+        this.algorithmKit = algorithmKit;
+        this.extUtils = getExtUtils();
+    }
+
+    /**
+    * The method creates a certificate with provided metadata and public key obtained from {@link #algorithmKit}. The result of invocation
+    * contains required data to use a certificate by its owner.
+    *
+    * @param certificateMetadata metadata which should be embedded into created certificate
+    * @return {@link CertificateData} which contain certificate and private key associated with the certificate.
+    */
+    public CertificateData issueSelfSignedCertificate(CertificateMetadata certificateMetadata) {
+        try {
+            KeyPair publicAndPrivateKey = algorithmKit.generateKeyPair();
+            X500Name issuerName = stringToX500Name(requireNonNull(certificateMetadata.getSubject(), "Certificate metadata are required."));
+            X509CertificateHolder x509CertificateHolder = buildCertificateHolder(
+                certificateMetadata,
+                issuerName,
+                publicAndPrivateKey.getPublic(),
+                publicAndPrivateKey
+            );
+            return new CertificateData(x509CertificateHolder, publicAndPrivateKey);
+        } catch (OperatorCreationException | CertIOException e) {
+            log.error("Error while generating certificate", e);
+            throw new RuntimeException("Error while generating self signed certificate", e);
+        }
+    }
+
+    /**
+    * The method is similar to {@link #issueSignedCertificate(CertificateMetadata, CertificateData)} but additionally it signs created
+    * certificate using data from <code>parentCertificateData</code>.
+    *
+    * @param metadata metadata which should be embedded into created certificate
+    * @param parentCertificateData data required to signe a newly issued certificate (private key among others things).
+    * @return {@link CertificateData} which contain certificate and private key associated with the certificate.
+    */
+    public CertificateData issueSignedCertificate(CertificateMetadata metadata, CertificateData parentCertificateData) {
+        try {
+            KeyPair publicAndPrivateKey = algorithmKit.generateKeyPair();
+            KeyPair parentKeyPair = requireNonNull(parentCertificateData, "Issuer certificate data are required").getKeyPair();
+            X500Name issuerName = parentCertificateData.getCertificateSubject();
+            var x509CertificateHolder = buildCertificateHolder(
+                requireNonNull(metadata, "Certificate metadata are required"),
+                issuerName,
+                publicAndPrivateKey.getPublic(),
+                parentKeyPair
+            );
+            return new CertificateData(x509CertificateHolder, publicAndPrivateKey);
+        } catch (OperatorCreationException | CertIOException e) {
+            log.error("Error while generating signed certificate", e);
+            throw new RuntimeException("Error while generating signed certificate", e);
+        }
+    }
+
+    private X509CertificateHolder buildCertificateHolder(
+        CertificateMetadata certificateMetadata,
+        X500Name issuerName,
+        PublicKey certificatePublicKey,
+        KeyPair parentKeyPair
+    ) throws CertIOException, OperatorCreationException {
+        X509v3CertificateBuilder builder = builderWithBasicExtensions(
+            certificateMetadata,
+            issuerName,
+            certificatePublicKey,
+            parentKeyPair.getPublic()
+        );
+        addSubjectAlternativeNameExtension(builder, certificateMetadata);
+        addExtendedKeyUsageExtension(builder, certificateMetadata);
+        return builder.build(createContentSigner(parentKeyPair.getPrivate()));
+    }
+
+    private ContentSigner createContentSigner(PrivateKey privateKey) throws OperatorCreationException {
+        return new JcaContentSignerBuilder(algorithmKit.getSignatureAlgorithmName()).setProvider(securityProvider).build(privateKey);
+    }
+
+    private void addExtendedKeyUsageExtension(X509v3CertificateBuilder builder, CertificateMetadata certificateMetadata)
+        throws CertIOException {
+        if (certificateMetadata.hasExtendedKeyUsage()) {
+            builder.addExtension(Extension.extendedKeyUsage, true, certificateMetadata.getExtendedKeyUsage());
+        }
+    }
+
+    private X509v3CertificateBuilder builderWithBasicExtensions(
+        CertificateMetadata certificateMetadata,
+        X500Name issuerName,
+        PublicKey certificatePublicKey,
+        PublicKey parentPublicKey
+    ) throws CertIOException {
+        X500Name subjectName = stringToX500Name(certificateMetadata.getSubject());
+        Date validityStartDate = new Date(System.currentTimeMillis() - (24 * 3600 * 1000));
+        Date validityEndDate = getEndDate(validityStartDate, certificateMetadata.getValidityDays());
+
+        BigInteger certificateSerialNumber = generateNextCertificateSerialNumber();
+        return new X509v3CertificateBuilder(
+            issuerName,
+            certificateSerialNumber,
+            validityStartDate,
+            validityEndDate,
+            subjectName,
+            SubjectPublicKeyInfo.getInstance(certificatePublicKey.getEncoded())
+        ).addExtension(Extension.basicConstraints, true, new BasicConstraints(certificateMetadata.isBasicConstrainIsCa()))
+            .addExtension(Extension.authorityKeyIdentifier, false, extUtils.createAuthorityKeyIdentifier(parentPublicKey))
+            .addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(certificatePublicKey))
+            .addExtension(Extension.keyUsage, true, certificateMetadata.asKeyUsage());
+    }
+
+    private void addSubjectAlternativeNameExtension(X509v3CertificateBuilder builder, CertificateMetadata metadata) throws CertIOException {
+        if (metadata.hasSubjectAlternativeNameExtension()) {
+            DERSequence subjectAlternativeNames = metadata.createSubjectAlternativeNames();
+            builder.addExtension(Extension.subjectAlternativeName, false, subjectAlternativeNames);
+        }
+    }
+
+    private Date getEndDate(Date startDate, int validityDays) {
+        Calendar calendar = Calendar.getInstance();
+        calendar.setTime(startDate);
+        calendar.add(Calendar.DATE, validityDays);
+        return calendar.getTime();
+    }
+
+    private static JcaX509ExtensionUtils getExtUtils() {
+        try {
+            return new JcaX509ExtensionUtils();
+        } catch (NoSuchAlgorithmException e) {
+            log.error("Getting certificate extension utils failed", e);
+            throw new RuntimeException("Getting certificate extension utils failed", e);
+        }
+    }
+
+    private X500Name stringToX500Name(String distinguishedName) {
+        if (Strings.isNullOrEmpty(distinguishedName)) {
+            throw new RuntimeException("No DN (distinguished name) must not be null or empty");
+        }
+        try {
+            return new X500Name(RFC4519Style.INSTANCE, distinguishedName);
+        } catch (IllegalArgumentException e) {
+            String message = String.format("Invalid DN (distinguished name) specified for %s certificate.", distinguishedName);
+            throw new RuntimeException(message, e);
+        }
+    }
+
+    private BigInteger generateNextCertificateSerialNumber() {
+        return BigInteger.valueOf(ID_COUNTER.incrementAndGet());
+    }
+}
+// CS-ENFORCE-SINGLE
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuerFactory.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuerFactory.java
new file mode 100644
index 0000000000..f68ccf6022
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/CertificatesIssuerFactory.java
@@ -0,0 +1,68 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.certificate;
+
+import java.security.Provider;
+import java.util.Optional;
+
+import org.bouncycastle.jce.provider.BouncyCastleProvider;
+
+import static org.opensearch.test.framework.certificate.AlgorithmKit.ecdsaSha256withEcdsa;
+import static org.opensearch.test.framework.certificate.AlgorithmKit.rsaSha256withRsa;
+
+/**
+* The class defines static factory method for class {@link CertificatesIssuer}. Object of class {@link CertificatesIssuer} created by
+* various factory methods differs in terms of cryptographic algorithms used for certificates creation.
+*
+*/
+class CertificatesIssuerFactory {
+
+    private static final int KEY_SIZE = 2048;
+
+    private CertificatesIssuerFactory() {
+
+    }
+
+    private static final Provider DEFAULT_SECURITY_PROVIDER = new BouncyCastleProvider();
+
+    /**
+    * @see {@link #rsaBaseCertificateIssuer(Provider)}
+    */
+    public static CertificatesIssuer rsaBaseCertificateIssuer() {
+        return rsaBaseCertificateIssuer(null);
+    }
+
+    /**
+    * The method creates {@link CertificatesIssuer} which uses RSA algorithm for certificate creation.
+    * @param securityProvider determines cryptographic algorithm implementation, can be <code>null</code>.
+    * @return new instance of {@link CertificatesIssuer}
+    */
+    public static CertificatesIssuer rsaBaseCertificateIssuer(Provider securityProvider) {
+        Provider provider = Optional.ofNullable(securityProvider).orElse(DEFAULT_SECURITY_PROVIDER);
+        return new CertificatesIssuer(provider, rsaSha256withRsa(provider, KEY_SIZE));
+    }
+
+    /**
+    * {@link #rsaBaseCertificateIssuer(Provider)}
+    */
+    public static CertificatesIssuer ecdsaBaseCertificatesIssuer() {
+        return ecdsaBaseCertificatesIssuer(null);
+    }
+
+    /**
+    * It creates {@link CertificatesIssuer} which uses asymmetric cryptography algorithm which relays on elliptic curves.
+    * @param securityProvider determines cryptographic algorithm implementation, can be <code>null</code>.
+    * @return new instance of {@link CertificatesIssuer}
+    */
+    public static CertificatesIssuer ecdsaBaseCertificatesIssuer(Provider securityProvider) {
+        Provider provider = Optional.ofNullable(securityProvider).orElse(DEFAULT_SECURITY_PROVIDER);
+        return new CertificatesIssuer(provider, ecdsaSha256withEcdsa(securityProvider, "P-384"));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/PemConverter.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/PemConverter.java
new file mode 100644
index 0000000000..749ab232bc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/PemConverter.java
@@ -0,0 +1,119 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+package org.opensearch.test.framework.certificate;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.security.PrivateKey;
+import java.security.SecureRandom;
+
+import com.google.common.base.Strings;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.bouncycastle.asn1.pkcs.PrivateKeyInfo;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.openssl.PKCS8Generator;
+import org.bouncycastle.openssl.jcajce.JcaPEMWriter;
+import org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8EncryptorBuilder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.OutputEncryptor;
+import org.bouncycastle.util.io.pem.PemGenerationException;
+import org.bouncycastle.util.io.pem.PemObject;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+* The class provides a method useful for converting certificate and private key into PEM format
+* @see <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+*/
+class PemConverter {
+
+    private PemConverter() {}
+
+    private static final Logger log = LogManager.getLogger(PemConverter.class);
+    private static final SecureRandom secureRandom = new SecureRandom();
+
+    /**
+    * It converts certificate represented by {@link X509CertificateHolder} object to PEM format
+    * @param certificate is a certificate to convert
+    * @return {@link String} which contains PEM encoded certificate
+    */
+    public static String toPem(X509CertificateHolder certificate) {
+        StringWriter stringWriter = new StringWriter();
+        try (JcaPEMWriter writer = new JcaPEMWriter(stringWriter)) {
+            writer.writeObject(requireNonNull(certificate, "Certificate is required."));
+        } catch (Exception e) {
+            throw new RuntimeException("Cannot write certificate in PEM format", e);
+        }
+        return stringWriter.toString();
+    }
+
+    /**
+    * It converts private key represented by class {@link PrivateKey} to PEM format.
+    * @param privateKey is a private key, cannot be <code>null</code>
+    * @param privateKeyPassword is a password used to encode private key, <code>null</code> for unencrypted private key
+    * @return {@link String} which contains PEM encoded private key
+    */
+    public static String toPem(PrivateKey privateKey, String privateKeyPassword) {
+        try (StringWriter stringWriter = new StringWriter()) {
+            savePrivateKey(stringWriter, requireNonNull(privateKey, "Private key is required."), privateKeyPassword);
+            return stringWriter.toString();
+        } catch (IOException e) {
+            throw new RuntimeException("Cannot convert private key into PEM format.", e);
+        }
+    }
+
+    private static void savePrivateKey(Writer out, PrivateKey privateKey, String privateKeyPassword) {
+        try (JcaPEMWriter writer = new JcaPEMWriter(out)) {
+            writer.writeObject(createPkcs8PrivateKeyPem(privateKey, privateKeyPassword));
+        } catch (Exception e) {
+            log.error("Error while writing private key.", e);
+            throw new RuntimeException("Error while writing private key ", e);
+        }
+    }
+
+    private static PemObject createPkcs8PrivateKeyPem(PrivateKey privateKey, String password) {
+        try {
+            OutputEncryptor outputEncryptor = password == null ? null : getPasswordEncryptor(password);
+            return new PKCS8Generator(PrivateKeyInfo.getInstance(privateKey.getEncoded()), outputEncryptor).generate();
+        } catch (PemGenerationException | OperatorCreationException e) {
+            log.error("Creating PKCS8 private key failed", e);
+            throw new RuntimeException("Creating PKCS8 private key failed", e);
+        }
+    }
+
+    private static OutputEncryptor getPasswordEncryptor(String password) throws OperatorCreationException {
+        if (!Strings.isNullOrEmpty(password)) {
+            JceOpenSSLPKCS8EncryptorBuilder encryptorBuilder = new JceOpenSSLPKCS8EncryptorBuilder(PKCS8Generator.PBE_SHA1_3DES);
+            encryptorBuilder.setRandom(secureRandom);
+            encryptorBuilder.setPassword(password.toCharArray());
+            return encryptorBuilder.build();
+        }
+        return null;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/PublicKeyUsage.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/PublicKeyUsage.java
new file mode 100644
index 0000000000..af37c66001
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/PublicKeyUsage.java
@@ -0,0 +1,75 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.certificate;
+
+import java.util.Objects;
+
+import org.bouncycastle.asn1.x509.KeyPurposeId;
+import org.bouncycastle.asn1.x509.KeyUsage;
+
+// CS-SUPPRESS-SINGLE: RegexpSingleline Extension is used to refer to certificate extensions
+/**
+* The class is associated with certificate extensions related to key usages. These extensions are defined by
+* <a href="https://www.rfc-editor.org/rfc/rfc5280.html">RFC 5280</a> and describes allowed usage of public kay which is embedded in
+* certificate. The class is related to the following extensions:
+* <ol>
+*     <li>Key Usage, defined in section <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.3">4.2.1.3</a></li>
+*     <li>Extended Key Usage, defined in section <a href="https://www.rfc-editor.org/rfc/rfc5280.html#section-4.2.1.12">4.2.1.12</a></li>
+* </ol>
+*
+* @see <a href="https://www.rfc-editor.org/rfc/rfc5280.html">RFC 5280</a>
+*/
+// CS-ENFORCE-SINGLE
+enum PublicKeyUsage {
+    DIGITAL_SIGNATURE(KeyUsage.digitalSignature),
+    KEY_CERT_SIGN(KeyUsage.keyCertSign),
+    CRL_SIGN(KeyUsage.cRLSign),
+    NON_REPUDIATION(KeyUsage.nonRepudiation),
+    KEY_ENCIPHERMENT(KeyUsage.keyEncipherment),
+
+    SERVER_AUTH(KeyPurposeId.id_kp_serverAuth),
+
+    CLIENT_AUTH(KeyPurposeId.id_kp_clientAuth);
+
+    private final int keyUsage;
+    private final KeyPurposeId id;
+
+    PublicKeyUsage(int keyUsage) {
+        this.keyUsage = keyUsage;
+        this.id = null;
+    }
+
+    PublicKeyUsage(KeyPurposeId id) {
+        this.id = Objects.requireNonNull(id, "Key purpose id is required.");
+        this.keyUsage = 0;
+    }
+
+    boolean isExtendedUsage() {
+        return this.id != null;
+    }
+
+    boolean isNotExtendedUsage() {
+        return this.id == null;
+    }
+
+    int asInt() {
+        if (isExtendedUsage()) {
+            throw new RuntimeException("Integer value is not available for extended key usage");
+        }
+        return keyUsage;
+    }
+
+    KeyPurposeId getKeyPurposeId() {
+        if (isExtendedUsage() == false) {
+            throw new RuntimeException("Key purpose id is not available.");
+        }
+        return id;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/certificate/TestCertificates.java b/src/integrationTest/java/org/opensearch/test/framework/certificate/TestCertificates.java
new file mode 100644
index 0000000000..2dd1dd5eea
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/certificate/TestCertificates.java
@@ -0,0 +1,214 @@
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.certificate;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.CLIENT_AUTH;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.CRL_SIGN;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.DIGITAL_SIGNATURE;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.KEY_CERT_SIGN;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.KEY_ENCIPHERMENT;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.NON_REPUDIATION;
+import static org.opensearch.test.framework.certificate.PublicKeyUsage.SERVER_AUTH;
+
+/**
+* It provides TLS certificates required in test cases. The certificates are generated during process of creation objects of the class.
+* The class exposes method which can be used to write certificates and private keys in temporally files.
+*/
+public class TestCertificates {
+
+    private static final Logger log = LogManager.getLogger(TestCertificates.class);
+
+    public static final Integer MAX_NUMBER_OF_NODE_CERTIFICATES = 3;
+
+    private static final String CA_SUBJECT = "DC=com,DC=example,O=Example Com Inc.,OU=Example Com Inc. Root CA,CN=Example Com Inc. Root CA";
+    private static final String ADMIN_DN = "CN=kirk,OU=client,O=client,L=test,C=de";
+    private static final int CERTIFICATE_VALIDITY_DAYS = 365;
+    private static final String CERTIFICATE_FILE_EXT = ".cert";
+    private static final String KEY_FILE_EXT = ".key";
+    private final CertificateData caCertificate;
+    private final CertificateData adminCertificate;
+    private final List<CertificateData> nodeCertificates;
+
+    private final CertificateData ldapCertificate;
+
+    public TestCertificates() {
+        this.caCertificate = createCaCertificate();
+        this.nodeCertificates = IntStream.range(0, MAX_NUMBER_OF_NODE_CERTIFICATES)
+            .mapToObj(this::createNodeCertificate)
+            .collect(Collectors.toList());
+        this.ldapCertificate = createLdapCertificate();
+        this.adminCertificate = createAdminCertificate(ADMIN_DN);
+        log.info("Test certificates successfully generated");
+    }
+
+    private CertificateData createCaCertificate() {
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(CA_SUBJECT, CERTIFICATE_VALIDITY_DAYS)
+            .withKeyUsage(true, DIGITAL_SIGNATURE, KEY_CERT_SIGN, CRL_SIGN);
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSelfSignedCertificate(metadata);
+    }
+
+    public CertificateData createAdminCertificate(String adminDn) {
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(adminDn, CERTIFICATE_VALIDITY_DAYS)
+            .withKeyUsage(false, DIGITAL_SIGNATURE, NON_REPUDIATION, KEY_ENCIPHERMENT, CLIENT_AUTH);
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSignedCertificate(metadata, caCertificate);
+    }
+
+    public CertificateData createSelfSignedCertificate(String distinguishedName) {
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(distinguishedName, CERTIFICATE_VALIDITY_DAYS);
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSelfSignedCertificate(metadata);
+    }
+
+    /**
+    * It returns the most trusted certificate. Certificates for nodes and users are derived from this certificate.
+    * @return file which contains certificate in PEM format, defined by <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+    */
+    public File getRootCertificate() {
+        return createTempFile("root", CERTIFICATE_FILE_EXT, caCertificate.certificateInPemFormat());
+    }
+
+    public CertificateData getRootCertificateData() {
+        return caCertificate;
+    }
+
+    /**
+    * Certificate for Open Search node. The certificate is derived from root certificate, returned by method {@link #getRootCertificate()}
+    * @param node is a node index. It has to be less than {@link #MAX_NUMBER_OF_NODE_CERTIFICATES}
+    * @return file which contains certificate in PEM format, defined by <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+    */
+    public File getNodeCertificate(int node) {
+        CertificateData certificateData = getNodeCertificateData(node);
+        return createTempFile("node-" + node, CERTIFICATE_FILE_EXT, certificateData.certificateInPemFormat());
+    }
+
+    public CertificateData getNodeCertificateData(int node) {
+        isCorrectNodeNumber(node);
+        return nodeCertificates.get(node);
+    }
+
+    private void isCorrectNodeNumber(int node) {
+        if (node >= MAX_NUMBER_OF_NODE_CERTIFICATES) {
+            String message = String.format(
+                "Cannot get certificate for node %d, number of created certificates for nodes is %d",
+                node,
+                MAX_NUMBER_OF_NODE_CERTIFICATES
+            );
+            throw new RuntimeException(message);
+        }
+    }
+
+    private CertificateData createNodeCertificate(Integer node) {
+        String subject = String.format("DC=de,L=test,O=node,OU=node,CN=node-%d.example.com", node);
+        String domain = String.format("node-%d.example.com", node);
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(subject, CERTIFICATE_VALIDITY_DAYS)
+            .withKeyUsage(false, DIGITAL_SIGNATURE, NON_REPUDIATION, KEY_ENCIPHERMENT, CLIENT_AUTH, SERVER_AUTH)
+            .withSubjectAlternativeName("1.2.3.4.5.5", List.of(domain, "localhost"), "127.0.0.1");
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSignedCertificate(metadata, caCertificate);
+    }
+
+    public CertificateData issueUserCertificate(String organizationUnit, String username) {
+        String subject = String.format("DC=de,L=test,O=users,OU=%s,CN=%s", organizationUnit, username);
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(subject, CERTIFICATE_VALIDITY_DAYS)
+            .withKeyUsage(false, DIGITAL_SIGNATURE, NON_REPUDIATION, KEY_ENCIPHERMENT, CLIENT_AUTH, SERVER_AUTH);
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSignedCertificate(metadata, caCertificate);
+    }
+
+    private CertificateData createLdapCertificate() {
+        String subject = "DC=de,L=test,O=node,OU=node,CN=ldap.example.com";
+        CertificateMetadata metadata = CertificateMetadata.basicMetadata(subject, CERTIFICATE_VALIDITY_DAYS)
+            .withKeyUsage(false, DIGITAL_SIGNATURE, NON_REPUDIATION, KEY_ENCIPHERMENT, CLIENT_AUTH, SERVER_AUTH)
+            .withSubjectAlternativeName(null, List.of("localhost"), "127.0.0.1");
+        return CertificatesIssuerFactory.rsaBaseCertificateIssuer().issueSignedCertificate(metadata, caCertificate);
+    }
+
+    public CertificateData getLdapCertificateData() {
+        return ldapCertificate;
+    }
+
+    /**
+    * It returns private key associated with node certificate returned by method {@link #getNodeCertificate(int)}
+    *
+    * @param node is a node index. It has to be less than {@link #MAX_NUMBER_OF_NODE_CERTIFICATES}
+    * @param privateKeyPassword is a password used to encode private key, can be <code>null</code> to retrieve unencrypted key.
+    * @return file which contains private key encoded in PEM format, defined
+    * by <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+    */
+    public File getNodeKey(int node, String privateKeyPassword) {
+        CertificateData certificateData = nodeCertificates.get(node);
+        return createTempFile("node-" + node, KEY_FILE_EXT, certificateData.privateKeyInPemFormat(privateKeyPassword));
+    }
+
+    /**
+    * Certificate which proofs admin user identity. Certificate is derived from root certificate returned by
+    * method {@link #getRootCertificate()}
+    * @return file which contains certificate in PEM format, defined by <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+    */
+    public File getAdminCertificate() {
+        return createTempFile("admin", CERTIFICATE_FILE_EXT, adminCertificate.certificateInPemFormat());
+    }
+
+    public CertificateData getAdminCertificateData() {
+        return adminCertificate;
+    }
+
+    /**
+    * It returns private key associated with admin certificate returned by {@link #getAdminCertificate()}.
+    *
+    * @param privateKeyPassword is a password used to encode private key, can be <code>null</code> to retrieve unencrypted key.
+    * @return file which contains private key encoded in PEM format, defined
+    * by <a href="https://www.rfc-editor.org/rfc/rfc1421.txt">RFC 1421</a>
+    */
+    public File getAdminKey(String privateKeyPassword) {
+        return createTempFile("admin", KEY_FILE_EXT, adminCertificate.privateKeyInPemFormat(privateKeyPassword));
+    }
+
+    public String[] getAdminDNs() {
+        return new String[] { ADMIN_DN };
+    }
+
+    private File createTempFile(String name, String suffix, String contents) {
+        try {
+            Path path = Files.createTempFile(name, suffix);
+            Files.writeString(path, contents);
+            return path.toFile();
+        } catch (IOException ex) {
+            throw new RuntimeException("Cannot create temp file with name " + name + " and suffix " + suffix);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/CloseableHttpClientFactory.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/CloseableHttpClientFactory.java
new file mode 100644
index 0000000000..a6a0324b27
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/CloseableHttpClientFactory.java
@@ -0,0 +1,81 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+
+import javax.net.ssl.SSLContext;
+
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.HttpClientConnectionManager;
+import org.apache.http.conn.routing.HttpRoutePlanner;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+
+class CloseableHttpClientFactory {
+
+    private final SSLContext sslContext;
+
+    private final RequestConfig requestConfig;
+
+    private final HttpRoutePlanner routePlanner;
+
+    private final String[] supportedCipherSuites;
+
+    public CloseableHttpClientFactory(
+        SSLContext sslContext,
+        RequestConfig requestConfig,
+        HttpRoutePlanner routePlanner,
+        String[] supportedCipherSuit
+    ) {
+        this.sslContext = Objects.requireNonNull(sslContext, "SSL context is required.");
+        this.requestConfig = requestConfig;
+        this.routePlanner = routePlanner;
+        this.supportedCipherSuites = supportedCipherSuit;
+    }
+
+    public CloseableHttpClient getHTTPClient() {
+
+        final HttpClientBuilder hcb = HttpClients.custom();
+
+        final SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(
+            this.sslContext,
+            /* Uses default supported protocals */ null,
+            supportedCipherSuites,
+            NoopHostnameVerifier.INSTANCE
+        );
+
+        final HttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(
+            RegistryBuilder.<ConnectionSocketFactory>create().register("https", sslsf).build(),
+            /* Uses default connnction factory */ null,
+            /* Uses default scheme port resolver */ null,
+            /* Uses default dns resolver */ null,
+            60,
+            TimeUnit.SECONDS
+        );
+        hcb.setConnectionManager(cm);
+        if (routePlanner != null) {
+            hcb.setRoutePlanner(routePlanner);
+        }
+
+        if (requestConfig != null) {
+            hcb.setDefaultRequestConfig(requestConfig);
+        }
+
+        return hcb.build();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/ClusterManager.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/ClusterManager.java
new file mode 100644
index 0000000000..db786a65e9
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/ClusterManager.java
@@ -0,0 +1,172 @@
+/*
+* Copyright 2015-2017 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.opensearch.index.reindex.ReindexPlugin;
+import org.opensearch.join.ParentJoinPlugin;
+import org.opensearch.percolator.PercolatorPlugin;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.search.aggregations.matrix.MatrixAggregationPlugin;
+import org.opensearch.security.OpenSearchSecurityPlugin;
+import org.opensearch.transport.Netty4Plugin;
+
+import static java.util.Collections.unmodifiableList;
+import static org.opensearch.test.framework.cluster.NodeType.CLIENT;
+import static org.opensearch.test.framework.cluster.NodeType.CLUSTER_MANAGER;
+import static org.opensearch.test.framework.cluster.NodeType.DATA;
+
+public enum ClusterManager {
+    // 3 nodes (1m, 2d)
+    DEFAULT(new NodeSettings(NodeRole.CLUSTER_MANAGER), new NodeSettings(NodeRole.DATA), new NodeSettings(NodeRole.DATA)),
+
+    // 1 node (1md)
+    SINGLENODE(new NodeSettings(NodeRole.CLUSTER_MANAGER, NodeRole.DATA)),
+
+    SINGLE_REMOTE_CLIENT(new NodeSettings(NodeRole.CLUSTER_MANAGER, NodeRole.DATA, NodeRole.REMOTE_CLUSTER_CLIENT)),
+
+    // 4 node (1m, 2d, 1c)
+    CLIENTNODE(
+        new NodeSettings(NodeRole.CLUSTER_MANAGER),
+        new NodeSettings(NodeRole.DATA),
+        new NodeSettings(NodeRole.DATA),
+        new NodeSettings()
+    ),
+
+    THREE_CLUSTER_MANAGERS(
+        new NodeSettings(NodeRole.CLUSTER_MANAGER),
+        new NodeSettings(NodeRole.CLUSTER_MANAGER),
+        new NodeSettings(NodeRole.CLUSTER_MANAGER),
+        new NodeSettings(NodeRole.DATA),
+        new NodeSettings(NodeRole.DATA)
+    );
+
+    private List<NodeSettings> nodeSettings = new LinkedList<>();
+
+    private ClusterManager(NodeSettings... settings) {
+        nodeSettings.addAll(Arrays.asList(settings));
+    }
+
+    public List<NodeSettings> getNodeSettings() {
+        return unmodifiableList(nodeSettings);
+    }
+
+    public List<NodeSettings> getClusterManagerNodeSettings() {
+        return unmodifiableList(nodeSettings.stream().filter(a -> a.containRole(NodeRole.CLUSTER_MANAGER)).collect(Collectors.toList()));
+    }
+
+    public List<NodeSettings> getNonClusterManagerNodeSettings() {
+        return unmodifiableList(nodeSettings.stream().filter(a -> !a.containRole(NodeRole.CLUSTER_MANAGER)).collect(Collectors.toList()));
+    }
+
+    public int getNodes() {
+        return nodeSettings.size();
+    }
+
+    public int getClusterManagerNodes() {
+        return (int) nodeSettings.stream().filter(a -> a.containRole(NodeRole.CLUSTER_MANAGER)).count();
+    }
+
+    public int getDataNodes() {
+        return (int) nodeSettings.stream().filter(a -> a.containRole(NodeRole.DATA)).count();
+    }
+
+    public int getClientNodes() {
+        return (int) nodeSettings.stream().filter(a -> a.isClientNode()).count();
+    }
+
+    public static class NodeSettings {
+
+        private final static List<Class<? extends Plugin>> DEFAULT_PLUGINS = List.of(
+            Netty4Plugin.class,
+            OpenSearchSecurityPlugin.class,
+            MatrixAggregationPlugin.class,
+            ParentJoinPlugin.class,
+            PercolatorPlugin.class,
+            ReindexPlugin.class
+        );
+
+        private final Set<NodeRole> roles;
+        public final List<Class<? extends Plugin>> plugins;
+
+        public NodeSettings(NodeRole... roles) {
+            this(roles.length == 0 ? Collections.emptySet() : EnumSet.copyOf(Arrays.asList(roles)), Collections.emptyList());
+        }
+
+        public NodeSettings(Set<NodeRole> roles, List<Class<? extends Plugin>> additionalPlugins) {
+            super();
+            this.roles = Objects.requireNonNull(roles, "Node roles set must not be null");
+            this.plugins = mergePlugins(additionalPlugins, DEFAULT_PLUGINS);
+        }
+
+        public boolean containRole(NodeRole nodeRole) {
+            return roles.contains(nodeRole);
+        }
+
+        public boolean isClientNode() {
+            return (roles.contains(NodeRole.DATA) == false) && (roles.contains(NodeRole.CLUSTER_MANAGER));
+        }
+
+        NodeType recognizeNodeType() {
+            if (roles.contains(NodeRole.CLUSTER_MANAGER)) {
+                return CLUSTER_MANAGER;
+            } else if (roles.contains(NodeRole.DATA)) {
+                return DATA;
+            } else {
+                return CLIENT;
+            }
+        }
+
+        private List<Class<? extends Plugin>> mergePlugins(Collection<Class<? extends Plugin>>... plugins) {
+            List<Class<? extends Plugin>> mergedPlugins = Arrays.stream(plugins)
+                .filter(Objects::nonNull)
+                .flatMap(Collection::stream)
+                .collect(Collectors.toList());
+            return unmodifiableList(mergedPlugins);
+        }
+
+        @SuppressWarnings("unchecked")
+        public Class<? extends Plugin>[] getPlugins() {
+            return plugins.toArray(new Class[0]);
+        }
+
+        public Class<? extends Plugin>[] pluginsWithAddition(List<Class<? extends Plugin>> additionalPlugins) {
+            return mergePlugins(plugins, additionalPlugins).toArray(Class[]::new);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/ContextHeaderDecoratorClient.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/ContextHeaderDecoratorClient.java
new file mode 100644
index 0000000000..c6ddf3281a
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/ContextHeaderDecoratorClient.java
@@ -0,0 +1,55 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+import java.util.Collections;
+import java.util.Map;
+
+import org.opensearch.core.action.ActionListener;
+import org.opensearch.action.ActionRequest;
+import org.opensearch.core.action.ActionResponse;
+import org.opensearch.action.ActionType;
+import org.opensearch.action.support.ContextPreservingActionListener;
+import org.opensearch.client.Client;
+import org.opensearch.client.FilterClient;
+import org.opensearch.common.util.concurrent.ThreadContext;
+import org.opensearch.common.util.concurrent.ThreadContext.StoredContext;
+
+/**
+* The class adds provided headers into context before sending request via wrapped {@link Client}
+*/
+public class ContextHeaderDecoratorClient extends FilterClient {
+
+    private Map<String, String> headers;
+
+    public ContextHeaderDecoratorClient(Client in, Map<String, String> headers) {
+        super(in);
+        this.headers = headers != null ? headers : Collections.emptyMap();
+    }
+
+    @Override
+    protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
+        ActionType<Response> action,
+        Request request,
+        ActionListener<Response> listener
+    ) {
+
+        ThreadContext threadContext = threadPool().getThreadContext();
+        ContextPreservingActionListener<Response> wrappedListener = new ContextPreservingActionListener<>(
+            threadContext.newRestorableContext(true),
+            listener
+        );
+
+        try (StoredContext ctx = threadContext.stashContext()) {
+            threadContext.putHeader(this.headers);
+            super.doExecute(action, request, wrappedListener);
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalAddressRoutePlanner.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalAddressRoutePlanner.java
new file mode 100644
index 0000000000..09d8b2b6de
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalAddressRoutePlanner.java
@@ -0,0 +1,56 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+import java.net.InetAddress;
+import java.util.Objects;
+
+import org.apache.http.HttpException;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpRequest;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.conn.routing.HttpRoute;
+import org.apache.http.impl.conn.DefaultRoutePlanner;
+import org.apache.http.impl.conn.DefaultSchemePortResolver;
+import org.apache.http.protocol.HttpContext;
+
+/**
+* Class which can be used to bind Apache HTTP client to a particular network interface or its IP address so that the IP address of
+* network interface is used as a source IP address of HTTP request.
+*/
+class LocalAddressRoutePlanner extends DefaultRoutePlanner {
+
+    /**
+    * IP address of one of the local network interfaces.
+    */
+    private final InetAddress localAddress;
+
+    /**
+    * Creates {@link LocalAddressRoutePlanner}
+    * @param localAddress IP address of one of the local network interfaces. Client socket used by Apache HTTP client will be bind to
+    *                        address from this parameter. The parameter must not be <code>null</code>.
+    */
+    public LocalAddressRoutePlanner(InetAddress localAddress) {
+        super(DefaultSchemePortResolver.INSTANCE);
+        this.localAddress = Objects.requireNonNull(localAddress);
+    }
+
+    @Override
+    public HttpRoute determineRoute(final HttpHost host, final HttpRequest request, final HttpContext context) throws HttpException {
+        final HttpClientContext clientContext = HttpClientContext.adapt(context);
+        final RequestConfig localRequsetConfig = RequestConfig.copy(clientContext.getRequestConfig())
+            .setLocalAddress(this.localAddress)
+            .build();
+        clientContext.setRequestConfig(localRequsetConfig);
+
+        return super.determineRoute(host, request, clientContext);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java
new file mode 100644
index 0000000000..64207ead5b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java
@@ -0,0 +1,534 @@
+/*
+* Copyright 2015-2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.rules.ExternalResource;
+
+import org.opensearch.client.Client;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.node.PluginAwareNode;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.security.action.configupdate.ConfigUpdateAction;
+import org.opensearch.security.action.configupdate.ConfigUpdateRequest;
+import org.opensearch.security.action.configupdate.ConfigUpdateResponse;
+import org.opensearch.security.securityconf.impl.CType;
+import org.opensearch.security.support.ConfigConstants;
+import org.opensearch.test.framework.AuditConfiguration;
+import org.opensearch.test.framework.AuthFailureListeners;
+import org.opensearch.test.framework.AuthzDomain;
+import org.opensearch.test.framework.OnBehalfOfConfig;
+import org.opensearch.test.framework.RolesMapping;
+import org.opensearch.test.framework.TestIndex;
+import org.opensearch.test.framework.TestSecurityConfig;
+import org.opensearch.test.framework.TestSecurityConfig.Role;
+import org.opensearch.test.framework.XffConfig;
+import org.opensearch.test.framework.audit.TestRuleAuditLogSink;
+import org.opensearch.test.framework.certificate.CertificateData;
+import org.opensearch.test.framework.certificate.TestCertificates;
+
+/**
+* This class allows to you start and manage a local cluster in an integration test. In contrast to the
+* OpenSearchIntegTestCase class, this class can be used in a composite way and allows the specification
+* of the security plugin configuration.
+*
+* This class can be both used as a JUnit @ClassRule (preferred) or in a try-with-resources block. The latter way should
+* be only sparingly used, as starting a cluster is not a particularly fast operation.
+*/
+public class LocalCluster extends ExternalResource implements AutoCloseable, OpenSearchClientProvider {
+
+    private static final Logger log = LogManager.getLogger(LocalCluster.class);
+
+    public static final String INIT_CONFIGURATION_DIR = "security.default_init.dir";
+
+    protected static final AtomicLong num = new AtomicLong();
+
+    private boolean sslOnly;
+
+    private final List<Class<? extends Plugin>> plugins;
+    private final ClusterManager clusterManager;
+    private final TestSecurityConfig testSecurityConfig;
+    private Settings nodeOverride;
+    private final String clusterName;
+    private final MinimumSecuritySettingsSupplierFactory minimumOpenSearchSettingsSupplierFactory;
+    private final TestCertificates testCertificates;
+    private final List<LocalCluster> clusterDependencies;
+    private final Map<String, LocalCluster> remotes;
+    private volatile LocalOpenSearchCluster localOpenSearchCluster;
+    private final List<TestIndex> testIndices;
+
+    private boolean loadConfigurationIntoIndex;
+
+    private LocalCluster(
+        String clusterName,
+        TestSecurityConfig testSgConfig,
+        boolean sslOnly,
+        Settings nodeOverride,
+        ClusterManager clusterManager,
+        List<Class<? extends Plugin>> plugins,
+        TestCertificates testCertificates,
+        List<LocalCluster> clusterDependencies,
+        Map<String, LocalCluster> remotes,
+        List<TestIndex> testIndices,
+        boolean loadConfigurationIntoIndex,
+        String defaultConfigurationInitDirectory
+    ) {
+        this.plugins = plugins;
+        this.testCertificates = testCertificates;
+        this.clusterManager = clusterManager;
+        this.testSecurityConfig = testSgConfig;
+        this.sslOnly = sslOnly;
+        this.nodeOverride = nodeOverride;
+        this.clusterName = clusterName;
+        this.minimumOpenSearchSettingsSupplierFactory = new MinimumSecuritySettingsSupplierFactory(testCertificates);
+        this.remotes = remotes;
+        this.clusterDependencies = clusterDependencies;
+        this.testIndices = testIndices;
+        this.loadConfigurationIntoIndex = loadConfigurationIntoIndex;
+        if (StringUtils.isNoneBlank(defaultConfigurationInitDirectory)) {
+            System.setProperty(INIT_CONFIGURATION_DIR, defaultConfigurationInitDirectory);
+        }
+    }
+
+    public String getSnapshotDirPath() {
+        return localOpenSearchCluster.getSnapshotDirPath();
+    }
+
+    @Override
+    public void before() throws Throwable {
+        if (localOpenSearchCluster == null) {
+            for (LocalCluster dependency : clusterDependencies) {
+                if (!dependency.isStarted()) {
+                    dependency.before();
+                }
+            }
+
+            for (Map.Entry<String, LocalCluster> entry : remotes.entrySet()) {
+                @SuppressWarnings("resource")
+                InetSocketAddress transportAddress = entry.getValue().localOpenSearchCluster.clusterManagerNode().getTransportAddress();
+                String key = "cluster.remote." + entry.getKey() + ".seeds";
+                String value = transportAddress.getHostString() + ":" + transportAddress.getPort();
+                log.info("Remote cluster '{}' added to configuration with the following seed '{}'", key, value);
+                nodeOverride = Settings.builder().put(nodeOverride).putList(key, value).build();
+            }
+            start();
+        }
+    }
+
+    @Override
+    protected void after() {
+        System.clearProperty(INIT_CONFIGURATION_DIR);
+        close();
+    }
+
+    @Override
+    public void close() {
+        if (localOpenSearchCluster != null && localOpenSearchCluster.isStarted()) {
+            try {
+                localOpenSearchCluster.destroy();
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            } finally {
+                localOpenSearchCluster = null;
+            }
+        }
+    }
+
+    @Override
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    @Override
+    public InetSocketAddress getHttpAddress() {
+        return localOpenSearchCluster.clientNode().getHttpAddress();
+    }
+
+    public int getHttpPort() {
+        return getHttpAddress().getPort();
+    }
+
+    @Override
+    public InetSocketAddress getTransportAddress() {
+        return localOpenSearchCluster.clientNode().getTransportAddress();
+    }
+
+    /**
+    * Returns a Client object that performs cluster-internal requests. As these requests are regard as cluster-internal,
+    * no authentication is performed and no user-information is attached to these requests. Thus, this client should
+    * be only used for preparing test environments, but not as a test subject.
+    */
+    public Client getInternalNodeClient() {
+        return localOpenSearchCluster.clientNode().getInternalNodeClient();
+    }
+
+    /**
+    * Returns a random node of this cluster.
+    */
+    public PluginAwareNode node() {
+        return this.localOpenSearchCluster.clusterManagerNode().esNode();
+    }
+
+    /**
+    * Returns all nodes of this cluster.
+    */
+    public List<LocalOpenSearchCluster.Node> nodes() {
+        return this.localOpenSearchCluster.getNodes();
+    }
+
+    public LocalOpenSearchCluster.Node getNodeByName(String name) {
+        return this.localOpenSearchCluster.getNodeByName(name);
+    }
+
+    public boolean isStarted() {
+        return localOpenSearchCluster != null;
+    }
+
+    public List<TestSecurityConfig.User> getConfiguredUsers() {
+        return testSecurityConfig.getUsers();
+    }
+
+    public Random getRandom() {
+        return localOpenSearchCluster.getRandom();
+    }
+
+    private void start() {
+        try {
+            NodeSettingsSupplier nodeSettingsSupplier = minimumOpenSearchSettingsSupplierFactory.minimumOpenSearchSettings(
+                sslOnly,
+                nodeOverride
+            );
+            localOpenSearchCluster = new LocalOpenSearchCluster(
+                clusterName,
+                clusterManager,
+                nodeSettingsSupplier,
+                plugins,
+                testCertificates
+            );
+
+            localOpenSearchCluster.start();
+
+            if (loadConfigurationIntoIndex) {
+                initSecurityIndex(testSecurityConfig);
+            }
+
+            try (Client client = getInternalNodeClient()) {
+                for (TestIndex index : this.testIndices) {
+                    index.create(client);
+                }
+            }
+
+        } catch (Exception e) {
+            log.error("Local ES cluster start failed", e);
+            throw new RuntimeException(e);
+        }
+    }
+
+    private void initSecurityIndex(TestSecurityConfig testSecurityConfig) {
+        log.info("Initializing OpenSearch Security index");
+        try (
+            Client client = new ContextHeaderDecoratorClient(
+                this.getInternalNodeClient(),
+                Map.of(ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER, "true")
+            )
+        ) {
+            testSecurityConfig.initIndex(client);
+            triggerConfigurationReload(client);
+        }
+    }
+
+    public void updateUserConfiguration(List<TestSecurityConfig.User> users) {
+        try (
+            Client client = new ContextHeaderDecoratorClient(
+                this.getInternalNodeClient(),
+                Map.of(ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER, "true")
+            )
+        ) {
+            testSecurityConfig.updateInternalUsersConfiguration(client, users);
+            triggerConfigurationReload(client);
+        }
+    }
+
+    private static void triggerConfigurationReload(Client client) {
+        ConfigUpdateResponse configUpdateResponse = client.execute(
+            ConfigUpdateAction.INSTANCE,
+            new ConfigUpdateRequest(CType.lcStringValues().toArray(new String[0]))
+        ).actionGet();
+        if (configUpdateResponse.hasFailures()) {
+            throw new RuntimeException("ConfigUpdateResponse produced failures: " + configUpdateResponse.failures());
+        }
+    }
+
+    public CertificateData getAdminCertificate() {
+        return testCertificates.getAdminCertificateData();
+    }
+
+    public static class Builder {
+
+        private final Settings.Builder nodeOverrideSettingsBuilder = Settings.builder();
+
+        private boolean sslOnly = false;
+        private final List<Class<? extends Plugin>> plugins = new ArrayList<>();
+        private Map<String, LocalCluster> remoteClusters = new HashMap<>();
+        private List<LocalCluster> clusterDependencies = new ArrayList<>();
+        private List<TestIndex> testIndices = new ArrayList<>();
+        private ClusterManager clusterManager = ClusterManager.DEFAULT;
+        private TestSecurityConfig testSecurityConfig = new TestSecurityConfig();
+        private String clusterName = "local_cluster";
+        private TestCertificates testCertificates;
+
+        private boolean loadConfigurationIntoIndex = true;
+
+        private String defaultConfigurationInitDirectory = null;
+
+        public Builder() {}
+
+        public Builder dependsOn(Object object) {
+            // We just want to make sure that the object is already done
+            if (object == null) {
+                throw new IllegalStateException("Dependency not fulfilled");
+            }
+            return this;
+        }
+
+        public Builder clusterManager(ClusterManager clusterManager) {
+            this.clusterManager = clusterManager;
+            return this;
+        }
+
+        /**
+        * Starts a cluster with only one node and thus saves some resources during startup. This shall be only used
+        * for tests where the node interactions are not relevant to the test. An example for this would be
+        * authentication tests, as authentication is always done on the directly connected node.
+        */
+        public Builder singleNode() {
+            this.clusterManager = ClusterManager.SINGLENODE;
+            return this;
+        }
+
+        /**
+        * Specifies the configuration of the security plugin that shall be used by this cluster.
+        */
+        public Builder config(TestSecurityConfig testSecurityConfig) {
+            this.testSecurityConfig = testSecurityConfig;
+            return this;
+        }
+
+        public Builder sslOnly(boolean sslOnly) {
+            this.sslOnly = sslOnly;
+            return this;
+        }
+
+        public Builder nodeSettings(Map<String, Object> settings) {
+            settings.forEach((key, value) -> {
+                if (value instanceof List) {
+                    List<String> values = ((List<?>) value).stream().map(String::valueOf).collect(Collectors.toList());
+                    nodeOverrideSettingsBuilder.putList(key, values);
+                } else {
+                    nodeOverrideSettingsBuilder.put(key, String.valueOf(value));
+                }
+            });
+
+            return this;
+        }
+
+        /**
+        * Adds additional plugins to the cluster
+        */
+        public Builder plugin(Class<? extends Plugin> plugin) {
+            this.plugins.add(plugin);
+
+            return this;
+        }
+
+        public Builder authFailureListeners(AuthFailureListeners listener) {
+            testSecurityConfig.authFailureListeners(listener);
+            return this;
+        }
+
+        /**
+        * Specifies a remote cluster and its name. The remote cluster can be then used in Cross Cluster Search
+        * operations with the specified name.
+        */
+        public Builder remote(String name, LocalCluster anotherCluster) {
+            remoteClusters.put(name, anotherCluster);
+
+            clusterDependencies.add(anotherCluster);
+
+            return this;
+        }
+
+        /**
+        * Specifies test indices that shall be created upon startup of the cluster.
+        */
+        public Builder indices(TestIndex... indices) {
+            this.testIndices.addAll(Arrays.asList(indices));
+            return this;
+        }
+
+        public Builder users(TestSecurityConfig.User... users) {
+            for (TestSecurityConfig.User user : users) {
+                testSecurityConfig.user(user);
+            }
+            return this;
+        }
+
+        public Builder audit(AuditConfiguration auditConfiguration) {
+            if (auditConfiguration != null) {
+                testSecurityConfig.audit(auditConfiguration);
+            }
+            if (auditConfiguration.isEnabled()) {
+                nodeOverrideSettingsBuilder.put("plugins.security.audit.type", TestRuleAuditLogSink.class.getName());
+            } else {
+                nodeOverrideSettingsBuilder.put("plugins.security.audit.type", "noop");
+            }
+            return this;
+        }
+
+        public List<TestSecurityConfig.User> getUsers() {
+            return testSecurityConfig.getUsers();
+        }
+
+        public Builder roles(Role... roles) {
+            testSecurityConfig.roles(roles);
+            return this;
+        }
+
+        public Builder rolesMapping(RolesMapping... mappings) {
+            testSecurityConfig.rolesMapping(mappings);
+            return this;
+        }
+
+        public Builder authc(TestSecurityConfig.AuthcDomain authc) {
+            testSecurityConfig.authc(authc);
+            return this;
+        }
+
+        public Builder authz(AuthzDomain authzDomain) {
+            testSecurityConfig.authz(authzDomain);
+            return this;
+        }
+
+        public Builder clusterName(String clusterName) {
+            this.clusterName = clusterName;
+            return this;
+        }
+
+        public Builder configIndexName(String configIndexName) {
+            testSecurityConfig.configIndexName(configIndexName);
+            return this;
+        }
+
+        public Builder testCertificates(TestCertificates certificates) {
+            this.testCertificates = certificates;
+            return this;
+        }
+
+        public Builder anonymousAuth(boolean anonAuthEnabled) {
+            testSecurityConfig.anonymousAuth(anonAuthEnabled);
+            return this;
+        }
+
+        public Builder xff(XffConfig xffConfig) {
+            testSecurityConfig.xff(xffConfig);
+            return this;
+        }
+
+        public Builder onBehalfOf(OnBehalfOfConfig onBehalfOfConfig) {
+            testSecurityConfig.onBehalfOf(onBehalfOfConfig);
+            return this;
+        }
+
+        public Builder loadConfigurationIntoIndex(boolean loadConfigurationIntoIndex) {
+            this.loadConfigurationIntoIndex = loadConfigurationIntoIndex;
+            return this;
+        }
+
+        public Builder certificates(TestCertificates certificates) {
+            this.testCertificates = certificates;
+            return this;
+        }
+
+        public Builder doNotFailOnForbidden(boolean doNotFailOnForbidden) {
+            testSecurityConfig.doNotFailOnForbidden(doNotFailOnForbidden);
+            return this;
+        }
+
+        public Builder defaultConfigurationInitDirectory(String defaultConfigurationInitDirectory) {
+            this.defaultConfigurationInitDirectory = defaultConfigurationInitDirectory;
+            return this;
+        }
+
+        public LocalCluster build() {
+            try {
+                if (testCertificates == null) {
+                    testCertificates = new TestCertificates();
+                }
+                clusterName += "_" + num.incrementAndGet();
+                Settings settings = nodeOverrideSettingsBuilder.build();
+                return new LocalCluster(
+                    clusterName,
+                    testSecurityConfig,
+                    sslOnly,
+                    settings,
+                    clusterManager,
+                    plugins,
+                    testCertificates,
+                    clusterDependencies,
+                    remoteClusters,
+                    testIndices,
+                    loadConfigurationIntoIndex,
+                    defaultConfigurationInitDirectory
+                );
+            } catch (Exception e) {
+                log.error("Failed to build LocalCluster", e);
+                throw new RuntimeException(e);
+            }
+        }
+
+    }
+
+    @Override
+    public TestCertificates getTestCertificates() {
+        return testCertificates;
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java
new file mode 100644
index 0000000000..c09127e592
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java
@@ -0,0 +1,573 @@
+/*
+* Copyright 2015-2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.Random;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.net.InetAddresses;
+import org.apache.commons.io.FileUtils;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import org.opensearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.opensearch.client.AdminClient;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.health.ClusterHealthStatus;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
+import org.opensearch.http.BindHttpException;
+import org.opensearch.node.PluginAwareNode;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.test.framework.certificate.TestCertificates;
+import org.opensearch.test.framework.cluster.ClusterManager.NodeSettings;
+import org.opensearch.transport.BindTransportException;
+
+import static java.util.Objects.requireNonNull;
+import static org.junit.Assert.assertEquals;
+import static org.opensearch.test.framework.cluster.NodeType.CLIENT;
+import static org.opensearch.test.framework.cluster.NodeType.CLUSTER_MANAGER;
+import static org.opensearch.test.framework.cluster.NodeType.DATA;
+import static org.opensearch.test.framework.cluster.PortAllocator.TCP;
+
+/**
+* Encapsulates all the logic to start a local OpenSearch cluster - without any configuration of the security plugin.
+*
+* The security plugin configuration is the job of LocalCluster, which uses this class under the hood. Thus, test code
+* for the security plugin should always use LocalCluster.
+*/
+public class LocalOpenSearchCluster {
+
+    static {
+        System.setProperty("opensearch.enforce.bootstrap.checks", "true");
+    }
+
+    private static final Logger log = LogManager.getLogger(LocalOpenSearchCluster.class);
+
+    private final String clusterName;
+    private final ClusterManager clusterManager;
+    private final NodeSettingsSupplier nodeSettingsSupplier;
+    private final List<Class<? extends Plugin>> additionalPlugins;
+    private final List<Node> nodes = new ArrayList<>();
+    private final TestCertificates testCertificates;
+
+    private File clusterHomeDir;
+    private List<String> seedHosts;
+    private List<String> initialClusterManagerHosts;
+    private int retry = 0;
+    private boolean started;
+    private Random random = new Random();
+
+    private File snapshotDir;
+
+    public LocalOpenSearchCluster(
+        String clusterName,
+        ClusterManager clusterManager,
+        NodeSettingsSupplier nodeSettingsSupplier,
+        List<Class<? extends Plugin>> additionalPlugins,
+        TestCertificates testCertificates
+    ) {
+        this.clusterName = clusterName;
+        this.clusterManager = clusterManager;
+        this.nodeSettingsSupplier = nodeSettingsSupplier;
+        this.additionalPlugins = additionalPlugins;
+        this.testCertificates = testCertificates;
+        try {
+            createClusterDirectory(clusterName);
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+
+    public String getSnapshotDirPath() {
+        return snapshotDir.getAbsolutePath();
+    }
+
+    private void createClusterDirectory(String clusterName) throws IOException {
+        this.clusterHomeDir = Files.createTempDirectory("local_cluster_" + clusterName).toFile();
+        log.debug("Cluster home directory '{}'.", clusterHomeDir.getAbsolutePath());
+        this.snapshotDir = new File(this.clusterHomeDir, "snapshots");
+        this.snapshotDir.mkdir();
+    }
+
+    private List<Node> getNodesByType(NodeType nodeType) {
+        return nodes.stream().filter(currentNode -> currentNode.hasAssignedType(nodeType)).collect(Collectors.toList());
+    }
+
+    private long countNodesByType(NodeType nodeType) {
+        return getNodesByType(nodeType).stream().count();
+    }
+
+    public void start() throws Exception {
+        log.info("Starting {}", clusterName);
+
+        int clusterManagerNodeCount = clusterManager.getClusterManagerNodes();
+        int nonClusterManagerNodeCount = clusterManager.getDataNodes() + clusterManager.getClientNodes();
+
+        SortedSet<Integer> clusterManagerNodeTransportPorts = TCP.allocate(
+            clusterName,
+            Math.max(clusterManagerNodeCount, 4),
+            5000 + 42 * 1000 + 300
+        );
+        SortedSet<Integer> clusterManagerNodeHttpPorts = TCP.allocate(clusterName, clusterManagerNodeCount, 5000 + 42 * 1000 + 200);
+
+        this.seedHosts = toHostList(clusterManagerNodeTransportPorts);
+        Set<Integer> clusterManagerPorts = clusterManagerNodeTransportPorts.stream()
+            .limit(clusterManagerNodeCount)
+            .collect(Collectors.toSet());
+        this.initialClusterManagerHosts = toHostList(clusterManagerPorts);
+
+        started = true;
+
+        CompletableFuture<Void> clusterManagerNodeFuture = startNodes(
+            clusterManager.getClusterManagerNodeSettings(),
+            clusterManagerNodeTransportPorts,
+            clusterManagerNodeHttpPorts
+        );
+
+        SortedSet<Integer> nonClusterManagerNodeTransportPorts = TCP.allocate(
+            clusterName,
+            nonClusterManagerNodeCount,
+            5000 + 42 * 1000 + 310
+        );
+        SortedSet<Integer> nonClusterManagerNodeHttpPorts = TCP.allocate(clusterName, nonClusterManagerNodeCount, 5000 + 42 * 1000 + 210);
+
+        CompletableFuture<Void> nonClusterManagerNodeFuture = startNodes(
+            clusterManager.getNonClusterManagerNodeSettings(),
+            nonClusterManagerNodeTransportPorts,
+            nonClusterManagerNodeHttpPorts
+        );
+
+        CompletableFuture.allOf(clusterManagerNodeFuture, nonClusterManagerNodeFuture).join();
+
+        if (isNodeFailedWithPortCollision()) {
+            log.info("Detected port collision for cluster manager node. Retrying.");
+
+            retry();
+            return;
+        }
+
+        log.info("Startup finished. Waiting for GREEN");
+
+        waitForCluster(ClusterHealthStatus.GREEN, TimeValue.timeValueSeconds(10), nodes.size());
+
+        log.info("Started: {}", this);
+
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public boolean isStarted() {
+        return started;
+    }
+
+    public void stop() {
+        List<CompletableFuture<Boolean>> stopFutures = new ArrayList<>();
+        for (Node node : nodes) {
+            stopFutures.add(node.stop(2, TimeUnit.SECONDS));
+        }
+        CompletableFuture.allOf(stopFutures.toArray(size -> new CompletableFuture[size])).join();
+    }
+
+    public void destroy() {
+        stop();
+        nodes.clear();
+
+        try {
+            FileUtils.deleteDirectory(clusterHomeDir);
+        } catch (IOException e) {
+            log.warn("Error while deleting " + clusterHomeDir, e);
+        }
+    }
+
+    public Node clientNode() {
+        return findRunningNode(getNodesByType(CLIENT), getNodesByType(DATA), getNodesByType(CLUSTER_MANAGER));
+    }
+
+    public Node clusterManagerNode() {
+        return findRunningNode(getNodesByType(CLUSTER_MANAGER));
+    }
+
+    public List<Node> getNodes() {
+        return Collections.unmodifiableList(nodes);
+    }
+
+    public Node getNodeByName(String name) {
+        return nodes.stream()
+            .filter(node -> node.getNodeName().equals(name))
+            .findAny()
+            .orElseThrow(
+                () -> new RuntimeException(
+                    "No such node with name: " + name + "; available: " + nodes.stream().map(Node::getNodeName).collect(Collectors.toList())
+                )
+            );
+    }
+
+    private boolean isNodeFailedWithPortCollision() {
+        return nodes.stream().anyMatch(Node::isPortCollision);
+    }
+
+    private void retry() throws Exception {
+        retry++;
+
+        if (retry > 10) {
+            throw new RuntimeException("Detected port collisions for cluster manager node. Giving up.");
+        }
+
+        stop();
+
+        this.nodes.clear();
+        this.seedHosts = null;
+        this.initialClusterManagerHosts = null;
+        createClusterDirectory("local_cluster_" + clusterName + "_retry_" + retry);
+        start();
+    }
+
+    @SafeVarargs
+    private final Node findRunningNode(List<Node> nodes, List<Node>... moreNodes) {
+        for (Node node : nodes) {
+            if (node.isRunning()) {
+                return node;
+            }
+        }
+
+        if (moreNodes != null && moreNodes.length > 0) {
+            for (List<Node> nodesList : moreNodes) {
+                for (Node node : nodesList) {
+                    if (node.isRunning()) {
+                        return node;
+                    }
+                }
+            }
+        }
+
+        return null;
+    }
+
+    private CompletableFuture<Void> startNodes(
+        List<NodeSettings> nodeSettingList,
+        SortedSet<Integer> transportPorts,
+        SortedSet<Integer> httpPorts
+    ) {
+        Iterator<Integer> transportPortIterator = transportPorts.iterator();
+        Iterator<Integer> httpPortIterator = httpPorts.iterator();
+        List<CompletableFuture<StartStage>> futures = new ArrayList<>();
+
+        for (NodeSettings nodeSettings : nodeSettingList) {
+            Node node = new Node(nodeSettings, transportPortIterator.next(), httpPortIterator.next());
+            futures.add(node.start());
+        }
+
+        return CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
+    }
+
+    public void waitForCluster(ClusterHealthStatus status, TimeValue timeout, int expectedNodeCount) throws IOException {
+        Client client = clientNode().getInternalNodeClient();
+
+        log.debug("waiting for cluster state {} and {} nodes", status.name(), expectedNodeCount);
+        AdminClient adminClient = client.admin();
+
+        final ClusterHealthResponse healthResponse = adminClient.cluster()
+            .prepareHealth()
+            .setWaitForStatus(status)
+            .setTimeout(timeout)
+            .setClusterManagerNodeTimeout(timeout)
+            .setWaitForNodes("" + expectedNodeCount)
+            .execute()
+            .actionGet();
+
+        if (log.isDebugEnabled()) {
+            log.debug("Current ClusterState:\n{}", Strings.toString(XContentType.JSON, healthResponse));
+        }
+
+        if (healthResponse.isTimedOut()) {
+            throw new IOException(
+                "cluster state is " + healthResponse.getStatus().name() + " with " + healthResponse.getNumberOfNodes() + " nodes"
+            );
+        } else {
+            log.debug("... cluster state ok {} with {} nodes", healthResponse.getStatus().name(), healthResponse.getNumberOfNodes());
+        }
+
+        assertEquals(expectedNodeCount, healthResponse.getNumberOfNodes());
+
+    }
+
+    @Override
+    public String toString() {
+        String clusterManagerNodes = nodeByTypeToString(CLUSTER_MANAGER);
+        String dataNodes = nodeByTypeToString(DATA);
+        String clientNodes = nodeByTypeToString(CLIENT);
+        return "\nES Cluster "
+            + clusterName
+            + "\ncluster manager nodes: "
+            + clusterManagerNodes
+            + "\n  data nodes: "
+            + dataNodes
+            + "\nclient nodes: "
+            + clientNodes
+            + "\n";
+    }
+
+    private String nodeByTypeToString(NodeType type) {
+        return getNodesByType(type).stream().map(Objects::toString).collect(Collectors.joining(", "));
+    }
+
+    private static List<String> toHostList(Collection<Integer> ports) {
+        return ports.stream().map(port -> "127.0.0.1:" + port).collect(Collectors.toList());
+    }
+
+    private String createNextNodeName(NodeSettings nodeSettings) {
+        NodeType type = nodeSettings.recognizeNodeType();
+        long nodeTypeCount = countNodesByType(type);
+        String nodeType = type.name().toLowerCase(Locale.ROOT);
+        return nodeType + "_" + nodeTypeCount;
+    }
+
+    public class Node implements OpenSearchClientProvider {
+        private final NodeType nodeType;
+        private final String nodeName;
+        private final NodeSettings nodeSettings;
+        private final File nodeHomeDir;
+        private final File dataDir;
+        private final File logsDir;
+        private final int transportPort;
+        private final int httpPort;
+        private final InetSocketAddress httpAddress;
+        private final InetSocketAddress transportAddress;
+        private PluginAwareNode node;
+        private boolean running = false;
+        private boolean portCollision = false;
+
+        Node(NodeSettings nodeSettings, int transportPort, int httpPort) {
+            this.nodeName = createNextNodeName(requireNonNull(nodeSettings, "Node settings are required."));
+            this.nodeSettings = nodeSettings;
+            this.nodeHomeDir = new File(clusterHomeDir, nodeName);
+            this.dataDir = new File(this.nodeHomeDir, "data");
+            this.logsDir = new File(this.nodeHomeDir, "logs");
+            this.transportPort = transportPort;
+            this.httpPort = httpPort;
+            InetAddress hostAddress = InetAddresses.forString("127.0.0.1");
+            this.httpAddress = new InetSocketAddress(hostAddress, httpPort);
+            this.transportAddress = new InetSocketAddress(hostAddress, transportPort);
+
+            this.nodeType = nodeSettings.recognizeNodeType();
+            nodes.add(this);
+        }
+
+        boolean hasAssignedType(NodeType type) {
+            return requireNonNull(type, "Node type is required.").equals(this.nodeType);
+        }
+
+        CompletableFuture<StartStage> start() {
+            CompletableFuture<StartStage> completableFuture = new CompletableFuture<>();
+            Class<? extends Plugin>[] mergedPlugins = nodeSettings.pluginsWithAddition(additionalPlugins);
+            this.node = new PluginAwareNode(nodeSettings.containRole(NodeRole.CLUSTER_MANAGER), getOpenSearchSettings(), mergedPlugins);
+
+            new Thread(new Runnable() {
+
+                @Override
+                public void run() {
+                    try {
+                        node.start();
+                        running = true;
+                        completableFuture.complete(StartStage.INITIALIZED);
+                    } catch (BindTransportException | BindHttpException e) {
+                        log.warn("Port collision detected for {}", this, e);
+                        portCollision = true;
+                        try {
+                            node.close();
+                        } catch (IOException e1) {
+                            log.error(e1);
+                        }
+
+                        node = null;
+                        TCP.reserve(transportPort, httpPort);
+
+                        completableFuture.complete(StartStage.RETRY);
+
+                    } catch (Throwable e) {
+                        log.error("Unable to start {}", this, e);
+                        node = null;
+                        completableFuture.completeExceptionally(e);
+                    }
+                }
+            }).start();
+
+            return completableFuture;
+        }
+
+        public Client getInternalNodeClient() {
+            return node.client();
+        }
+
+        public PluginAwareNode esNode() {
+            return node;
+        }
+
+        public boolean isRunning() {
+            return running;
+        }
+
+        public <X> X getInjectable(Class<X> clazz) {
+            return node.injector().getInstance(clazz);
+        }
+
+        public CompletableFuture<Boolean> stop(long timeout, TimeUnit timeUnit) {
+            return CompletableFuture.supplyAsync(() -> {
+                try {
+                    log.info("Stopping {}", this);
+
+                    running = false;
+
+                    if (node != null) {
+                        node.close();
+                        boolean stopped = node.awaitClose(timeout, timeUnit);
+                        node = null;
+                        return stopped;
+                    } else {
+                        return false;
+                    }
+                } catch (Throwable e) {
+                    String message = "Error while stopping " + this;
+                    log.warn(message, e);
+                    throw new RuntimeException(message, e);
+                }
+            });
+        }
+
+        @Override
+        public String toString() {
+            String state = running ? "RUNNING" : node != null ? "INITIALIZING" : "STOPPED";
+
+            return nodeName + " " + state + " [" + transportPort + ", " + httpPort + "]";
+        }
+
+        public boolean isPortCollision() {
+            return portCollision;
+        }
+
+        public String getNodeName() {
+            return nodeName;
+        }
+
+        @Override
+        public InetSocketAddress getHttpAddress() {
+            return httpAddress;
+        }
+
+        @Override
+        public InetSocketAddress getTransportAddress() {
+            return transportAddress;
+        }
+
+        private Settings getOpenSearchSettings() {
+            Settings settings = Settings.builder()
+                .put(getMinimalOpenSearchSettings())
+                .putList("path.repo", List.of(getSnapshotDirPath()))
+                .build();
+
+            if (nodeSettingsSupplier != null) {
+                // TODO node number
+                return Settings.builder().put(settings).put(nodeSettingsSupplier.get(0)).build();
+            }
+            return settings;
+        }
+
+        private Settings getMinimalOpenSearchSettings() {
+            return Settings.builder()
+                .put("node.name", nodeName)
+                .putList("node.roles", createNodeRolesSettings())
+                .put("cluster.name", clusterName)
+                .put("path.home", nodeHomeDir.toPath())
+                .put("path.data", dataDir.toPath())
+                .put("path.logs", logsDir.toPath())
+                .putList("cluster.initial_cluster_manager_nodes", initialClusterManagerHosts)
+                .put("discovery.initial_state_timeout", "8s")
+                .putList("discovery.seed_hosts", seedHosts)
+                .put("transport.tcp.port", transportPort)
+                .put("http.port", httpPort)
+                .put("cluster.routing.allocation.disk.threshold_enabled", false)
+                .put("discovery.probe.connect_timeout", "10s")
+                .put("discovery.probe.handshake_timeout", "10s")
+                .put("http.cors.enabled", true)
+                .put("gateway.auto_import_dangling_indices", "true")
+                .build();
+        }
+
+        private List<String> createNodeRolesSettings() {
+            final ImmutableList.Builder<String> nodeRolesBuilder = ImmutableList.<String>builder();
+            if (nodeSettings.containRole(NodeRole.DATA)) {
+                nodeRolesBuilder.add("data");
+            }
+            if (nodeSettings.containRole(NodeRole.CLUSTER_MANAGER)) {
+                nodeRolesBuilder.add("cluster_manager");
+            }
+            if (nodeSettings.containRole(NodeRole.REMOTE_CLUSTER_CLIENT)) {
+                nodeRolesBuilder.add("remote_cluster_client");
+            }
+            return nodeRolesBuilder.build();
+        }
+
+        @Override
+        public String getClusterName() {
+            return clusterName;
+        }
+
+        @Override
+        public TestCertificates getTestCertificates() {
+            return testCertificates;
+        }
+    }
+
+    public Random getRandom() {
+        return random;
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java
new file mode 100644
index 0000000000..4ad5f8420e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java
@@ -0,0 +1,84 @@
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.security.support.ConfigConstants;
+import org.opensearch.test.framework.certificate.TestCertificates;
+
+public class MinimumSecuritySettingsSupplierFactory {
+
+    private final String PRIVATE_KEY_HTTP_PASSWORD = "aWVV63OJ4qzZyPrBwl2MFny4ZV8lQRZchjL";
+    private final String PRIVATE_KEY_TRANSPORT_PASSWORD = "iWbUv9w79sbd5tcxvSJNfHXS9GhcPCvdw9x";
+
+    private TestCertificates testCertificates;
+
+    public MinimumSecuritySettingsSupplierFactory(TestCertificates testCertificates) {
+        if (testCertificates == null) {
+            throw new IllegalArgumentException("certificates must not be null");
+        }
+        this.testCertificates = testCertificates;
+
+    }
+
+    public NodeSettingsSupplier minimumOpenSearchSettings(boolean sslOnly, Settings other) {
+        return i -> minimumOpenSearchSettingsBuilder(i, sslOnly).put(other).build();
+    }
+
+    private Settings.Builder minimumOpenSearchSettingsBuilder(int node, boolean sslOnly) {
+
+        Settings.Builder builder = Settings.builder();
+
+        builder.put("plugins.security.ssl.transport.pemtrustedcas_filepath", testCertificates.getRootCertificate().getAbsolutePath());
+        builder.put("plugins.security.ssl.transport.pemcert_filepath", testCertificates.getNodeCertificate(node).getAbsolutePath());
+        builder.put(
+            "plugins.security.ssl.transport.pemkey_filepath",
+            testCertificates.getNodeKey(node, PRIVATE_KEY_TRANSPORT_PASSWORD).getAbsolutePath()
+        );
+        builder.put("plugins.security.ssl.transport.pemkey_password", PRIVATE_KEY_TRANSPORT_PASSWORD);
+
+        builder.put("plugins.security.ssl.http.enabled", true);
+        builder.put("plugins.security.ssl.http.pemtrustedcas_filepath", testCertificates.getRootCertificate().getAbsolutePath());
+        builder.put("plugins.security.ssl.http.pemcert_filepath", testCertificates.getNodeCertificate(node).getAbsolutePath());
+        builder.put(
+            "plugins.security.ssl.http.pemkey_filepath",
+            testCertificates.getNodeKey(node, PRIVATE_KEY_HTTP_PASSWORD).getAbsolutePath()
+        );
+        builder.put("plugins.security.ssl.http.pemkey_password", PRIVATE_KEY_HTTP_PASSWORD);
+        if (sslOnly == false) {
+            builder.put(ConfigConstants.SECURITY_BACKGROUND_INIT_IF_SECURITYINDEX_NOT_EXIST, false);
+            builder.putList("plugins.security.authcz.admin_dn", testCertificates.getAdminDNs());
+            builder.put("plugins.security.compliance.salt", "1234567890123456");
+            builder.put("plugins.security.audit.type", "noop");
+            builder.put("plugins.security.background_init_if_securityindex_not_exist", "false");
+        }
+        return builder;
+
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeRole.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeRole.java
new file mode 100644
index 0000000000..0d465fa119
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeRole.java
@@ -0,0 +1,16 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+enum NodeRole {
+    DATA,
+    CLUSTER_MANAGER,
+    REMOTE_CLUSTER_CLIENT
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeSettingsSupplier.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeSettingsSupplier.java
new file mode 100644
index 0000000000..cab3a760ca
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeSettingsSupplier.java
@@ -0,0 +1,34 @@
+/*
+* Copyright 2015-2018 _floragunn_ GmbH
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import org.opensearch.common.settings.Settings;
+
+@FunctionalInterface
+public interface NodeSettingsSupplier {
+    Settings get(int i);
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeType.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeType.java
new file mode 100644
index 0000000000..8ae8941e8d
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/NodeType.java
@@ -0,0 +1,17 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+package org.opensearch.test.framework.cluster;
+
+enum NodeType {
+    CLIENT,
+    DATA,
+    CLUSTER_MANAGER
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/OpenSearchClientProvider.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/OpenSearchClientProvider.java
new file mode 100644
index 0000000000..5e4ac59b92
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/OpenSearchClientProvider.java
@@ -0,0 +1,261 @@
+/*
+* Copyright 2020 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import static org.opensearch.test.framework.cluster.TestRestClientConfiguration.getBasicAuthHeader;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.security.KeyStore;
+import java.security.cert.Certificate;
+import java.security.cert.X509Certificate;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
+import org.opensearch.client.RestClient;
+import org.opensearch.client.RestClientBuilder;
+import org.opensearch.client.RestHighLevelClient;
+import org.opensearch.security.support.PemKeyReader;
+import org.opensearch.test.framework.certificate.CertificateData;
+import org.opensearch.test.framework.certificate.TestCertificates;
+
+/**
+* OpenSearchClientProvider provides methods to get a REST client for an underlying cluster or node.
+*
+* This interface is implemented by both LocalCluster and LocalOpenSearchCluster.Node. Thus, it is possible to get a
+* REST client for a whole cluster (without choosing the node it is operating on) or to get a REST client for a specific
+* node.
+*/
+public interface OpenSearchClientProvider {
+
+    String getClusterName();
+
+    TestCertificates getTestCertificates();
+
+    InetSocketAddress getHttpAddress();
+
+    InetSocketAddress getTransportAddress();
+
+    default URI getHttpAddressAsURI() {
+        InetSocketAddress address = getHttpAddress();
+        return URI.create("https://" + address.getHostString() + ":" + address.getPort());
+    }
+
+    /**
+    * Returns a REST client that sends requests with basic authentication for the specified User object. Optionally,
+    * additional HTTP headers can be specified which will be sent with each request.
+    *
+    * This method should be usually preferred. The other getRestClient() methods shall be only used for specific
+    * situations.
+    */
+    default TestRestClient getRestClient(UserCredentialsHolder user, CertificateData useCertificateData, Header... headers) {
+        return getRestClient(user.getName(), user.getPassword(), useCertificateData, headers);
+    }
+
+    default TestRestClient getRestClient(UserCredentialsHolder user, Header... headers) {
+        return getRestClient(user.getName(), user.getPassword(), null, headers);
+    }
+
+    default RestHighLevelClient getRestHighLevelClient(String username, String password, Header... headers) {
+        return getRestHighLevelClient(new UserCredentialsHolder() {
+            @Override
+            public String getName() {
+                return username;
+            }
+
+            @Override
+            public String getPassword() {
+                return password;
+            }
+        }, Arrays.asList(headers));
+    }
+
+    default RestHighLevelClient getRestHighLevelClient(UserCredentialsHolder user) {
+        return getRestHighLevelClient(user, Collections.emptySet());
+    }
+
+    default RestHighLevelClient getRestHighLevelClient(UserCredentialsHolder user, Collection<? extends Header> defaultHeaders) {
+
+        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+        credentialsProvider.setCredentials(new AuthScope(null, -1), new UsernamePasswordCredentials(user.getName(), user.getPassword()));
+
+        return getRestHighLevelClient(credentialsProvider, defaultHeaders);
+    }
+
+    default RestHighLevelClient getRestHighLevelClient(Collection<? extends Header> defaultHeaders) {
+        return getRestHighLevelClient((BasicCredentialsProvider) null, defaultHeaders);
+    }
+
+    default RestHighLevelClient getRestHighLevelClient(
+        BasicCredentialsProvider credentialsProvider,
+        Collection<? extends Header> defaultHeaders
+    ) {
+        RestClientBuilder.HttpClientConfigCallback configCallback = httpClientBuilder -> {
+            Optional.ofNullable(credentialsProvider).ifPresent(httpClientBuilder::setDefaultCredentialsProvider);
+            httpClientBuilder.setSSLStrategy(
+                new SSLIOSessionStrategy(
+                    getSSLContext(),
+                    /* Use default supported protocols */ null,
+                    /* Use default supported cipher suites */ null,
+                    NoopHostnameVerifier.INSTANCE
+                )
+            );
+            httpClientBuilder.setDefaultHeaders(defaultHeaders);
+            return httpClientBuilder;
+        };
+
+        InetSocketAddress httpAddress = getHttpAddress();
+        RestClientBuilder builder = RestClient.builder(new HttpHost(httpAddress.getHostString(), httpAddress.getPort(), "https"))
+            .setHttpClientConfigCallback(configCallback);
+
+        return new RestHighLevelClient(builder);
+    }
+
+    default org.apache.http.impl.client.CloseableHttpClient getClosableHttpClient(String[] supportedCipherSuit) {
+        CloseableHttpClientFactory factory = new CloseableHttpClientFactory(getSSLContext(), null, null, supportedCipherSuit);
+        return factory.getHTTPClient();
+    }
+
+    /**
+    * Returns a REST client that sends requests with basic authentication for the specified user name and password. Optionally,
+    * additional HTTP headers can be specified which will be sent with each request.
+    *
+    * Normally, you should use the method with the User object argument instead. Use this only if you need more
+    * control over username and password - for example, when you want to send a wrong password.
+    */
+    default TestRestClient getRestClient(String user, String password, Header... headers) {
+        return createGenericClientRestClient(new TestRestClientConfiguration().username(user).password(password).headers(headers));
+    }
+
+    default TestRestClient getRestClient(String user, String password, CertificateData useCertificateData, Header... headers) {
+        Header basicAuthHeader = getBasicAuthHeader(user, password);
+        if (headers != null && headers.length > 0) {
+            List<Header> concatenatedHeaders = Stream.concat(Stream.of(basicAuthHeader), Stream.of(headers)).collect(Collectors.toList());
+            return getRestClient(concatenatedHeaders, useCertificateData);
+        }
+        return getRestClient(useCertificateData, basicAuthHeader);
+    }
+
+    /**
+    * Returns a REST client. You can specify additional HTTP headers that will be sent with each request. Use this
+    * method to test non-basic authentication, such as JWT bearer authentication.
+    */
+    default TestRestClient getRestClient(CertificateData useCertificateData, Header... headers) {
+        return getRestClient(Arrays.asList(headers), useCertificateData);
+    }
+
+    default TestRestClient getRestClient(Header... headers) {
+        return getRestClient((CertificateData) null, headers);
+    }
+
+    default TestRestClient getRestClient(List<Header> headers) {
+        return createGenericClientRestClient(new TestRestClientConfiguration().headers(headers));
+
+    }
+
+    default TestRestClient getRestClient(List<Header> headers, CertificateData useCertificateData) {
+        return createGenericClientRestClient(headers, useCertificateData, null);
+    }
+
+    default TestRestClient createGenericClientRestClient(
+        List<Header> headers,
+        CertificateData useCertificateData,
+        InetAddress sourceInetAddress
+    ) {
+        return new TestRestClient(getHttpAddress(), headers, getSSLContext(useCertificateData), sourceInetAddress);
+    }
+
+    default TestRestClient createGenericClientRestClient(TestRestClientConfiguration configuration) {
+        return new TestRestClient(getHttpAddress(), configuration.getHeaders(), getSSLContext(), configuration.getSourceInetAddress());
+    }
+
+    private SSLContext getSSLContext() {
+        return getSSLContext(null);
+    }
+
+    private SSLContext getSSLContext(CertificateData useCertificateData) {
+        X509Certificate[] trustCertificates;
+
+        try {
+            trustCertificates = PemKeyReader.loadCertificatesFromFile(getTestCertificates().getRootCertificate().getAbsolutePath());
+
+            TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+            KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
+
+            ks.load(null);
+
+            for (int i = 0; i < trustCertificates.length; i++) {
+                ks.setCertificateEntry("caCert-" + i, trustCertificates[i]);
+            }
+            KeyManager[] keyManagers = null;
+            if (useCertificateData != null) {
+                Certificate[] chainOfTrust = { useCertificateData.certificate() };
+                ks.setKeyEntry("admin-certificate", useCertificateData.getKey(), null, chainOfTrust);
+                KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+                keyManagerFactory.init(ks, null);
+                keyManagers = keyManagerFactory.getKeyManagers();
+            }
+
+            tmf.init(ks);
+
+            SSLContext sslContext = SSLContext.getInstance("TLS");
+
+            sslContext.init(keyManagers, tmf.getTrustManagers(), null);
+            return sslContext;
+
+        } catch (Exception e) {
+            throw new RuntimeException("Error loading root CA ", e);
+        }
+    }
+
+    public interface UserCredentialsHolder {
+        String getName();
+
+        String getPassword();
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java
new file mode 100644
index 0000000000..139378fd22
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java
@@ -0,0 +1,165 @@
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.time.Duration;
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.opensearch.test.framework.cluster.SocketUtils.SocketType;
+
+/**
+* Helper class that allows you to allocate ports. This helps with avoiding port conflicts when running tests.
+*
+* NOTE: This class shall be only considered as a heuristic; ports allocated by this class are just likely to be unused;
+* however, there is no guarantee that these will be unused. Thus, you still need to be prepared for port-conflicts
+* and retry the procedure in such a case. If you notice a port conflict, you can use the method reserve() to mark the
+* port as used.
+*/
+public class PortAllocator {
+
+    public static final PortAllocator TCP = new PortAllocator(SocketType.TCP, Duration.ofSeconds(100));
+    public static final PortAllocator UDP = new PortAllocator(SocketType.UDP, Duration.ofSeconds(100));
+
+    private final SocketType socketType;
+    private final Duration timeoutDuration;
+    private final Map<Integer, AllocatedPort> allocatedPorts = new HashMap<>();
+
+    PortAllocator(SocketType socketType, Duration timeoutDuration) {
+        this.socketType = socketType;
+        this.timeoutDuration = timeoutDuration;
+    }
+
+    public SortedSet<Integer> allocate(String clientName, int numRequested, int minPort) {
+
+        int startPort = minPort;
+
+        while (!isAvailable(startPort)) {
+            startPort += 10;
+        }
+
+        SortedSet<Integer> foundPorts = new TreeSet<>();
+
+        for (int currentPort = startPort; foundPorts.size() < numRequested
+            && currentPort < SocketUtils.PORT_RANGE_MAX
+            && (currentPort - startPort) < 10000; currentPort++) {
+            if (allocate(clientName, currentPort)) {
+                foundPorts.add(currentPort);
+            }
+        }
+
+        if (foundPorts.size() < numRequested) {
+            throw new IllegalStateException("Could not find " + numRequested + " free ports starting at " + minPort + " for " + clientName);
+        }
+
+        return foundPorts;
+    }
+
+    public int allocateSingle(String clientName, int minPort) {
+
+        int startPort = minPort;
+
+        for (int currentPort = startPort; currentPort < SocketUtils.PORT_RANGE_MAX && (currentPort - startPort) < 10000; currentPort++) {
+            if (allocate(clientName, currentPort)) {
+                return currentPort;
+            }
+        }
+
+        throw new IllegalStateException("Could not find free port starting at " + minPort + " for " + clientName);
+
+    }
+
+    public void reserve(int... ports) {
+
+        for (int port : ports) {
+            allocate("reserved", port);
+        }
+    }
+
+    private boolean isInUse(int port) {
+        boolean result = !this.socketType.isPortAvailable(port);
+
+        if (result) {
+            synchronized (this) {
+                allocatedPorts.put(port, new AllocatedPort("external"));
+            }
+        }
+
+        return result;
+    }
+
+    private boolean isAvailable(int port) {
+        return !isAllocated(port) && !isInUse(port);
+    }
+
+    private synchronized boolean isAllocated(int port) {
+        AllocatedPort allocatedPort = this.allocatedPorts.get(port);
+
+        return allocatedPort != null && !allocatedPort.isTimedOut();
+    }
+
+    private synchronized boolean allocate(String clientName, int port) {
+
+        AllocatedPort allocatedPort = allocatedPorts.get(port);
+
+        if (allocatedPort != null && allocatedPort.isTimedOut()) {
+            allocatedPort = null;
+            allocatedPorts.remove(port);
+        }
+
+        if (allocatedPort == null && !isInUse(port)) {
+            allocatedPorts.put(port, new AllocatedPort(clientName));
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    private class AllocatedPort {
+        final String client;
+        final Instant allocatedAt;
+
+        AllocatedPort(String client) {
+            this.client = client;
+            this.allocatedAt = Instant.now();
+        }
+
+        boolean isTimedOut() {
+            return allocatedAt.plus(timeoutDuration).isBefore(Instant.now());
+        }
+
+        @Override
+        public String toString() {
+            return "AllocatedPort [client=" + client + ", allocatedAt=" + allocatedAt + "]";
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/RestClientException.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/RestClientException.java
new file mode 100644
index 0000000000..0023d65e98
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/RestClientException.java
@@ -0,0 +1,16 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+public class RestClientException extends RuntimeException {
+    RestClientException(String message, Throwable cause) {
+        super(message, cause);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/SearchRequestFactory.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/SearchRequestFactory.java
new file mode 100644
index 0000000000..b40aa9cfcb
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/SearchRequestFactory.java
@@ -0,0 +1,104 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.action.search.SearchScrollRequest;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.aggregations.AggregationBuilders;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.search.sort.FieldSortBuilder;
+import org.opensearch.search.sort.SortOrder;
+
+import static java.util.concurrent.TimeUnit.MINUTES;
+
+public final class SearchRequestFactory {
+
+    private SearchRequestFactory() {
+
+    }
+
+    public static SearchRequest queryByIdsRequest(String indexName, String... ids) {
+        SearchRequest searchRequest = new SearchRequest(indexName);
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.idsQuery().addIds(ids));
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest queryStringQueryRequest(String indexName, String queryString) {
+        SearchRequest searchRequest = new SearchRequest(indexName);
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.queryStringQuery(queryString));
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest queryStringQueryRequest(String[] indicesNames, String queryString) {
+        SearchRequest searchRequest = new SearchRequest(indicesNames);
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.queryStringQuery(queryString));
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest queryStringQueryRequest(String queryString) {
+        SearchRequest searchRequest = new SearchRequest();
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.queryStringQuery(queryString));
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest searchRequestWithScroll(String indexName, int pageSize) {
+        SearchRequest searchRequest = new SearchRequest(indexName);
+        searchRequest.scroll(new TimeValue(1, MINUTES));
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.matchAllQuery());
+        searchSourceBuilder.sort(new FieldSortBuilder("_id").order(SortOrder.ASC));
+        searchSourceBuilder.size(pageSize);
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest searchAll(String... indexNames) {
+        SearchRequest searchRequest = new SearchRequest(indexNames);
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.query(QueryBuilders.matchAllQuery());
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchScrollRequest getSearchScrollRequest(SearchResponse searchResponse) {
+        SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId());
+        scrollRequest.scroll(new TimeValue(1, MINUTES));
+        return scrollRequest;
+    }
+
+    public static SearchRequest averageAggregationRequest(String indexName, String aggregationName, String fieldName) {
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.aggregation(AggregationBuilders.avg(aggregationName).field(fieldName));
+        searchSourceBuilder.size(0);
+        SearchRequest searchRequest = new SearchRequest(indexName);
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+
+    public static SearchRequest statsAggregationRequest(String indexName, String aggregationName, String fieldName) {
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.aggregation(AggregationBuilders.stats(aggregationName).field(fieldName));
+        searchSourceBuilder.size(0);
+        SearchRequest searchRequest = new SearchRequest(indexName);
+        searchRequest.source(searchSourceBuilder);
+        return searchRequest;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtils.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtils.java
new file mode 100644
index 0000000000..5895829243
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtils.java
@@ -0,0 +1,311 @@
+/*
+* Copyright 2002-2017 the original author or authors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.net.DatagramSocket;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.util.Random;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import javax.net.ServerSocketFactory;
+
+/**
+* Simple utility methods for working with network sockets &mdash; for example,
+* for finding available ports on {@code localhost}.
+*
+* <p>Within this class, a TCP port refers to a port for a {@link ServerSocket};
+* whereas, a UDP port refers to a port for a {@link DatagramSocket}.
+*
+* @author Sam Brannen
+* @author Ben Hale
+* @author Arjen Poutsma
+* @author Gunnar Hillert
+* @author Gary Russell
+* @since 4.0
+*/
+public class SocketUtils {
+
+    /**
+    * The default minimum value for port ranges used when finding an available
+    * socket port.
+    */
+    public static final int PORT_RANGE_MIN = 1024;
+
+    /**
+    * The default maximum value for port ranges used when finding an available
+    * socket port.
+    */
+    public static final int PORT_RANGE_MAX = 65535;
+
+    private static final Random random = new Random(System.currentTimeMillis());
+
+    /**
+    * Although {@code SocketUtils} consists solely of static utility methods,
+    * this constructor is intentionally {@code public}.
+    * <h4>Rationale</h4>
+    * <p>Static methods from this class may be invoked from within XML
+    * configuration files using the Spring Expression Language (SpEL) and the
+    * following syntax.
+    * <pre><code>&lt;bean id="bean1" ... p:port="#{T(org.springframework.util.SocketUtils).findAvailableTcpPort(12000)}" /&gt;</code></pre>
+    * If this constructor were {@code private}, you would be required to supply
+    * the fully qualified class name to SpEL's {@code T()} function for each usage.
+    * Thus, the fact that this constructor is {@code public} allows you to reduce
+    * boilerplate configuration with SpEL as can be seen in the following example.
+    * <pre><code>&lt;bean id="socketUtils" class="org.springframework.util.SocketUtils" /&gt;
+    * &lt;bean id="bean1" ... p:port="#{socketUtils.findAvailableTcpPort(12000)}" /&gt;
+    * &lt;bean id="bean2" ... p:port="#{socketUtils.findAvailableTcpPort(30000)}" /&gt;</code></pre>
+    */
+    public SocketUtils() {
+        /* no-op */
+    }
+
+    /**
+    * Find an available TCP port randomly selected from the range
+    * [{@value #PORT_RANGE_MIN}, {@value #PORT_RANGE_MAX}].
+    * @return an available TCP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableTcpPort() {
+        return findAvailableTcpPort(PORT_RANGE_MIN);
+    }
+
+    /**
+    * Find an available TCP port randomly selected from the range
+    * [{@code minPort}, {@value #PORT_RANGE_MAX}].
+    * @param minPort the minimum port number
+    * @return an available TCP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableTcpPort(int minPort) {
+        return findAvailableTcpPort(minPort, PORT_RANGE_MAX);
+    }
+
+    /**
+    * Find an available TCP port randomly selected from the range
+    * [{@code minPort}, {@code maxPort}].
+    * @param minPort the minimum port number
+    * @param maxPort the maximum port number
+    * @return an available TCP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableTcpPort(int minPort, int maxPort) {
+        return SocketType.TCP.findAvailablePort(minPort, maxPort);
+    }
+
+    /**
+    * Find the requested number of available TCP ports, each randomly selected
+    * from the range [{@value #PORT_RANGE_MIN}, {@value #PORT_RANGE_MAX}].
+    * @param numRequested the number of available ports to find
+    * @return a sorted set of available TCP port numbers
+    * @throws IllegalStateException if the requested number of available ports could not be found
+    */
+    public static SortedSet<Integer> findAvailableTcpPorts(int numRequested) {
+        return findAvailableTcpPorts(numRequested, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    /**
+    * Find the requested number of available TCP ports, each randomly selected
+    * from the range [{@code minPort}, {@code maxPort}].
+    * @param numRequested the number of available ports to find
+    * @param minPort the minimum port number
+    * @param maxPort the maximum port number
+    * @return a sorted set of available TCP port numbers
+    * @throws IllegalStateException if the requested number of available ports could not be found
+    */
+    public static SortedSet<Integer> findAvailableTcpPorts(int numRequested, int minPort, int maxPort) {
+        return SocketType.TCP.findAvailablePorts(numRequested, minPort, maxPort);
+    }
+
+    /**
+    * Find an available UDP port randomly selected from the range
+    * [{@value #PORT_RANGE_MIN}, {@value #PORT_RANGE_MAX}].
+    * @return an available UDP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableUdpPort() {
+        return findAvailableUdpPort(PORT_RANGE_MIN);
+    }
+
+    /**
+    * Find an available UDP port randomly selected from the range
+    * [{@code minPort}, {@value #PORT_RANGE_MAX}].
+    * @param minPort the minimum port number
+    * @return an available UDP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableUdpPort(int minPort) {
+        return findAvailableUdpPort(minPort, PORT_RANGE_MAX);
+    }
+
+    /**
+    * Find an available UDP port randomly selected from the range
+    * [{@code minPort}, {@code maxPort}].
+    * @param minPort the minimum port number
+    * @param maxPort the maximum port number
+    * @return an available UDP port number
+    * @throws IllegalStateException if no available port could be found
+    */
+    public static int findAvailableUdpPort(int minPort, int maxPort) {
+        return SocketType.UDP.findAvailablePort(minPort, maxPort);
+    }
+
+    /**
+    * Find the requested number of available UDP ports, each randomly selected
+    * from the range [{@value #PORT_RANGE_MIN}, {@value #PORT_RANGE_MAX}].
+    * @param numRequested the number of available ports to find
+    * @return a sorted set of available UDP port numbers
+    * @throws IllegalStateException if the requested number of available ports could not be found
+    */
+    public static SortedSet<Integer> findAvailableUdpPorts(int numRequested) {
+        return findAvailableUdpPorts(numRequested, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    /**
+    * Find the requested number of available UDP ports, each randomly selected
+    * from the range [{@code minPort}, {@code maxPort}].
+    * @param numRequested the number of available ports to find
+    * @param minPort the minimum port number
+    * @param maxPort the maximum port number
+    * @return a sorted set of available UDP port numbers
+    * @throws IllegalStateException if the requested number of available ports could not be found
+    */
+    public static SortedSet<Integer> findAvailableUdpPorts(int numRequested, int minPort, int maxPort) {
+        return SocketType.UDP.findAvailablePorts(numRequested, minPort, maxPort);
+    }
+
+    public enum SocketType {
+
+        TCP {
+            @Override
+            protected boolean isPortAvailable(int port) {
+                try {
+                    ServerSocket serverSocket = ServerSocketFactory.getDefault()
+                        .createServerSocket(port, 1, InetAddress.getByName("localhost"));
+                    serverSocket.close();
+                    return true;
+                } catch (Exception ex) {
+                    return false;
+                }
+            }
+        },
+
+        UDP {
+            @Override
+            protected boolean isPortAvailable(int port) {
+                try {
+                    DatagramSocket socket = new DatagramSocket(port, InetAddress.getByName("localhost"));
+                    socket.close();
+                    return true;
+                } catch (Exception ex) {
+                    return false;
+                }
+            }
+        };
+
+        /**
+        * Determine if the specified port for this {@code SocketType} is
+        * currently available on {@code localhost}.
+        */
+        protected abstract boolean isPortAvailable(int port);
+
+        /**
+        * Find a pseudo-random port number within the range
+        * [{@code minPort}, {@code maxPort}].
+        * @param minPort the minimum port number
+        * @param maxPort the maximum port number
+        * @return a random port number within the specified range
+        */
+        private int findRandomPort(int minPort, int maxPort) {
+            int portRange = maxPort - minPort;
+            return minPort + random.nextInt(portRange + 1);
+        }
+
+        /**
+        * Find an available port for this {@code SocketType}, randomly selected
+        * from the range [{@code minPort}, {@code maxPort}].
+        * @param minPort the minimum port number
+        * @param maxPort the maximum port number
+        * @return an available port number for this socket type
+        * @throws IllegalStateException if no available port could be found
+        */
+        int findAvailablePort(int minPort, int maxPort) {
+            // Assert.assertTrue(minPort > 0, "'minPort' must be greater than 0");
+            // Assert.isTrue(maxPort >= minPort, "'maxPort' must be greater than or equal to 'minPort'");
+            // Assert.isTrue(maxPort <= PORT_RANGE_MAX, "'maxPort' must be less than or equal to " + PORT_RANGE_MAX);
+
+            int portRange = maxPort - minPort;
+            int candidatePort;
+            int searchCounter = 0;
+            do {
+                if (searchCounter > portRange) {
+                    throw new IllegalStateException(
+                        String.format(
+                            "Could not find an available %s port in the range [%d, %d] after %d attempts",
+                            name(),
+                            minPort,
+                            maxPort,
+                            searchCounter
+                        )
+                    );
+                }
+                candidatePort = findRandomPort(minPort, maxPort);
+                searchCounter++;
+            } while (!isPortAvailable(candidatePort));
+
+            return candidatePort;
+        }
+
+        /**
+        * Find the requested number of available ports for this {@code SocketType},
+        * each randomly selected from the range [{@code minPort}, {@code maxPort}].
+        * @param numRequested the number of available ports to find
+        * @param minPort the minimum port number
+        * @param maxPort the maximum port number
+        * @return a sorted set of available port numbers for this socket type
+        * @throws IllegalStateException if the requested number of available ports could not be found
+        */
+        SortedSet<Integer> findAvailablePorts(int numRequested, int minPort, int maxPort) {
+            SortedSet<Integer> availablePorts = new TreeSet<>();
+            int attemptCount = 0;
+            while ((++attemptCount <= numRequested + 100) && availablePorts.size() < numRequested) {
+                availablePorts.add(findAvailablePort(minPort, maxPort));
+            }
+
+            if (availablePorts.size() != numRequested) {
+                throw new IllegalStateException(
+                    String.format("Could not find %d available %s ports in the range [%d, %d]", numRequested, name(), minPort, maxPort)
+                );
+            }
+
+            return availablePorts;
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtilsTests.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtilsTests.java
new file mode 100644
index 0000000000..fb298c5283
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/SocketUtilsTests.java
@@ -0,0 +1,207 @@
+/*
+* Copyright 2002-2020 the original author or authors.
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*      https://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.net.DatagramSocket;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.util.SortedSet;
+
+import javax.net.ServerSocketFactory;
+
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.startsWith;
+import static org.junit.Assert.assertThrows;
+import static org.opensearch.test.framework.cluster.SocketUtils.PORT_RANGE_MAX;
+import static org.opensearch.test.framework.cluster.SocketUtils.PORT_RANGE_MIN;
+
+/**
+* Unit tests for {@link SocketUtils}.
+*
+* @author Sam Brannen
+* @author Gary Russell
+*/
+public class SocketUtilsTests {
+
+    // TCP
+
+    @Test
+    public void findAvailableTcpPort() {
+        int port = SocketUtils.findAvailableTcpPort();
+        assertPortInRange(port, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    @Test
+    public void findAvailableTcpPortWithMinPortEqualToMaxPort() {
+        int minMaxPort = SocketUtils.findAvailableTcpPort();
+        int port = SocketUtils.findAvailableTcpPort(minMaxPort, minMaxPort);
+        assertThat(port, equalTo(minMaxPort));
+    }
+
+    @Test
+    public void findAvailableTcpPortWhenPortOnLoopbackInterfaceIsNotAvailable() throws Exception {
+        int port = SocketUtils.findAvailableTcpPort();
+        try (ServerSocket socket = ServerSocketFactory.getDefault().createServerSocket(port, 1, InetAddress.getByName("localhost"))) {
+            assertThat(socket, notNullValue());
+            // will only look for the exact port
+            IllegalStateException exception = assertThrows(IllegalStateException.class, () -> SocketUtils.findAvailableTcpPort(port, port));
+            assertThat(exception.getMessage(), startsWith("Could not find an available TCP port"));
+            assertThat(exception.getMessage(), endsWith("after 1 attempts"));
+        }
+    }
+
+    @Test
+    public void findAvailableTcpPortWithMin() {
+        int port = SocketUtils.findAvailableTcpPort(50000);
+        assertPortInRange(port, 50000, PORT_RANGE_MAX);
+    }
+
+    @Test
+    public void findAvailableTcpPortInRange() {
+        int minPort = 20000;
+        int maxPort = minPort + 1000;
+        int port = SocketUtils.findAvailableTcpPort(minPort, maxPort);
+        assertPortInRange(port, minPort, maxPort);
+    }
+
+    @Test
+    public void find4AvailableTcpPorts() {
+        findAvailableTcpPorts(4);
+    }
+
+    @Test
+    public void find50AvailableTcpPorts() {
+        findAvailableTcpPorts(50);
+    }
+
+    @Test
+    public void find4AvailableTcpPortsInRange() {
+        findAvailableTcpPorts(4, 30000, 35000);
+    }
+
+    @Test
+    public void find50AvailableTcpPortsInRange() {
+        findAvailableTcpPorts(50, 40000, 45000);
+    }
+
+    // UDP
+
+    @Test
+    public void findAvailableUdpPort() {
+        int port = SocketUtils.findAvailableUdpPort();
+        assertPortInRange(port, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    @Test
+    public void findAvailableUdpPortWhenPortOnLoopbackInterfaceIsNotAvailable() throws Exception {
+        int port = SocketUtils.findAvailableUdpPort();
+        try (DatagramSocket socket = new DatagramSocket(port, InetAddress.getByName("localhost"))) {
+            assertThat(socket, notNullValue());
+            // will only look for the exact port
+            IllegalStateException exception = assertThrows(IllegalStateException.class, () -> SocketUtils.findAvailableUdpPort(port, port));
+            assertThat(exception.getMessage(), startsWith("Could not find an available UDP port"));
+            assertThat(exception.getMessage(), endsWith("after 1 attempts"));
+        }
+    }
+
+    @Test
+    public void findAvailableUdpPortWithMin() {
+        int port = SocketUtils.findAvailableUdpPort(50000);
+        assertPortInRange(port, 50000, PORT_RANGE_MAX);
+    }
+
+    @Test
+    public void findAvailableUdpPortInRange() {
+        int minPort = 20000;
+        int maxPort = minPort + 1000;
+        int port = SocketUtils.findAvailableUdpPort(minPort, maxPort);
+        assertPortInRange(port, minPort, maxPort);
+    }
+
+    @Test
+    public void find4AvailableUdpPorts() {
+        findAvailableUdpPorts(4);
+    }
+
+    @Test
+    public void find50AvailableUdpPorts() {
+        findAvailableUdpPorts(50);
+    }
+
+    @Test
+    public void find4AvailableUdpPortsInRange() {
+        findAvailableUdpPorts(4, 30000, 35000);
+    }
+
+    @Test
+    public void find50AvailableUdpPortsInRange() {
+        findAvailableUdpPorts(50, 40000, 45000);
+    }
+
+    // Helpers
+
+    private void findAvailableTcpPorts(int numRequested) {
+        SortedSet<Integer> ports = SocketUtils.findAvailableTcpPorts(numRequested);
+        assertAvailablePorts(ports, numRequested, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    private void findAvailableTcpPorts(int numRequested, int minPort, int maxPort) {
+        SortedSet<Integer> ports = SocketUtils.findAvailableTcpPorts(numRequested, minPort, maxPort);
+        assertAvailablePorts(ports, numRequested, minPort, maxPort);
+    }
+
+    private void findAvailableUdpPorts(int numRequested) {
+        SortedSet<Integer> ports = SocketUtils.findAvailableUdpPorts(numRequested);
+        assertAvailablePorts(ports, numRequested, PORT_RANGE_MIN, PORT_RANGE_MAX);
+    }
+
+    private void findAvailableUdpPorts(int numRequested, int minPort, int maxPort) {
+        SortedSet<Integer> ports = SocketUtils.findAvailableUdpPorts(numRequested, minPort, maxPort);
+        assertAvailablePorts(ports, numRequested, minPort, maxPort);
+    }
+
+    private void assertPortInRange(int port, int minPort, int maxPort) {
+        assertThat("port [" + port + "] >= " + minPort, port, greaterThanOrEqualTo(minPort));
+        assertThat("port [" + port + "] <= " + maxPort, port, lessThanOrEqualTo(maxPort));
+    }
+
+    private void assertAvailablePorts(SortedSet<Integer> ports, int numRequested, int minPort, int maxPort) {
+        assertThat("number of ports requested", ports.size(), equalTo(numRequested));
+        for (int port : ports) {
+            assertPortInRange(port, minPort, maxPort);
+        }
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/StartStage.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/StartStage.java
new file mode 100644
index 0000000000..d5dce0056a
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/StartStage.java
@@ -0,0 +1,15 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+enum StartStage {
+    INITIALIZED,
+    RETRY
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClient.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClient.java
new file mode 100644
index 0000000000..9daa432484
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClient.java
@@ -0,0 +1,457 @@
+/*
+* Copyright 2021 floragunn GmbH
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+*/
+
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.cluster;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import javax.net.ssl.SSLContext;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpPatch;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.conn.routing.HttpRoutePlanner;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import org.opensearch.common.xcontent.XContentType;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.security.DefaultObjectMapper;
+
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;
+
+/**
+* A OpenSearch REST client, which is tailored towards use in integration tests. Instances of this class can be
+* obtained via the OpenSearchClientProvider interface, which is implemented by LocalCluster and Node.
+*
+* Usually, an instance of this class sends constant authentication headers which are defined when obtaining the
+* instance from OpenSearchClientProvider.
+*/
+public class TestRestClient implements AutoCloseable {
+
+    private static final Logger log = LogManager.getLogger(TestRestClient.class);
+
+    private boolean enableHTTPClientSSL = true;
+    private boolean sendHTTPClientCertificate = false;
+    private InetSocketAddress nodeHttpAddress;
+    private RequestConfig requestConfig;
+    private List<Header> headers = new ArrayList<>();
+    private Header CONTENT_TYPE_JSON = new BasicHeader("Content-Type", "application/json");
+    private SSLContext sslContext;
+
+    private final InetAddress sourceInetAddress;
+
+    public TestRestClient(InetSocketAddress nodeHttpAddress, List<Header> headers, SSLContext sslContext, InetAddress sourceInetAddress) {
+        this.nodeHttpAddress = nodeHttpAddress;
+        this.headers.addAll(headers);
+        this.sslContext = sslContext;
+        this.sourceInetAddress = sourceInetAddress;
+    }
+
+    public HttpResponse get(String path, List<NameValuePair> queryParameters, Header... headers) {
+        try {
+            URI uri = new URIBuilder(getHttpServerUri()).setPath(path).addParameters(queryParameters).build();
+            return executeRequest(new HttpGet(uri), headers);
+        } catch (URISyntaxException ex) {
+            throw new RuntimeException("Incorrect URI syntax", ex);
+        }
+    }
+
+    public HttpResponse get(String path, Header... headers) {
+        return get(path, Collections.emptyList(), headers);
+    }
+
+    public HttpResponse getAuthInfo(Header... headers) {
+        return executeRequest(new HttpGet(getHttpServerUri() + "/_opendistro/_security/authinfo?pretty"), headers);
+    }
+
+    public HttpResponse getOnBehalfOfToken(String jsonData, Header... headers) {
+        try {
+            HttpPost httpPost = new HttpPost(
+                new URIBuilder(getHttpServerUri() + "/_plugins/_security/api/generateonbehalfoftoken?pretty").build()
+            );
+            httpPost.setEntity(toStringEntity(jsonData));
+            return executeRequest(httpPost, mergeHeaders(CONTENT_TYPE_JSON, headers));
+        } catch (URISyntaxException ex) {
+            throw new RuntimeException("Incorrect URI syntax", ex);
+        }
+    }
+
+    public HttpResponse changeInternalUserPassword(String jsonData, Header... headers) {
+        try {
+            HttpPut httpPut = new HttpPut(new URIBuilder(getHttpServerUri() + "/_plugins/_security/api/account?pretty").build());
+            httpPut.setEntity(toStringEntity(jsonData));
+            return executeRequest(httpPut, mergeHeaders(CONTENT_TYPE_JSON, headers));
+        } catch (URISyntaxException ex) {
+            throw new RuntimeException("Incorrect URI syntax", ex);
+        }
+    }
+
+    public void assertCorrectCredentials(String expectedUserName) {
+        HttpResponse response = getAuthInfo();
+        assertThat(response, notNullValue());
+        response.assertStatusCode(200);
+        String username = response.getTextFromJsonBody("/user_name");
+        String message = String.format("Expected user name is '%s', but was '%s'", expectedUserName, username);
+        assertThat(message, username, equalTo(expectedUserName));
+    }
+
+    public HttpResponse head(String path, Header... headers) {
+        return executeRequest(new HttpHead(getHttpServerUri() + "/" + path), headers);
+    }
+
+    public HttpResponse options(String path, Header... headers) {
+        return executeRequest(new HttpOptions(getHttpServerUri() + "/" + path), headers);
+    }
+
+    public HttpResponse putJson(String path, String body, Header... headers) {
+        HttpPut uriRequest = new HttpPut(getHttpServerUri() + "/" + path);
+        uriRequest.setEntity(toStringEntity(body));
+        return executeRequest(uriRequest, mergeHeaders(CONTENT_TYPE_JSON, headers));
+    }
+
+    private StringEntity toStringEntity(String body) {
+        try {
+            return new StringEntity(body);
+        } catch (UnsupportedEncodingException uee) {
+            throw new RuntimeException(uee);
+        }
+    }
+
+    public HttpResponse putJson(String path, ToXContentObject body) {
+        return putJson(path, Strings.toString(XContentType.JSON, body));
+    }
+
+    public HttpResponse put(String path) {
+        HttpPut uriRequest = new HttpPut(getHttpServerUri() + "/" + path);
+        return executeRequest(uriRequest);
+    }
+
+    public HttpResponse delete(String path, Header... headers) {
+        return executeRequest(new HttpDelete(getHttpServerUri() + "/" + path), headers);
+    }
+
+    public HttpResponse postJson(String path, String body, Header... headers) {
+        HttpPost uriRequest = new HttpPost(getHttpServerUri() + "/" + path);
+        uriRequest.setEntity(toStringEntity(body));
+        return executeRequest(uriRequest, mergeHeaders(CONTENT_TYPE_JSON, headers));
+    }
+
+    public HttpResponse postJson(String path, ToXContentObject body) {
+        return postJson(path, Strings.toString(XContentType.JSON, body));
+    }
+
+    public HttpResponse post(String path) {
+        HttpPost uriRequest = new HttpPost(getHttpServerUri() + "/" + path);
+        return executeRequest(uriRequest);
+    }
+
+    public HttpResponse patch(String path, String body) {
+        HttpPatch uriRequest = new HttpPatch(getHttpServerUri() + "/" + path);
+        uriRequest.setEntity(toStringEntity(body));
+        return executeRequest(uriRequest, CONTENT_TYPE_JSON);
+    }
+
+    public HttpResponse assignRoleToUser(String username, String roleName) {
+        Objects.requireNonNull(roleName, "Role name is required");
+        Objects.requireNonNull(username, "User name is required");
+        String body = String.format("[{\"op\":\"add\",\"path\":\"/opendistro_security_roles\",\"value\":[\"%s\"]}]", roleName);
+        return patch("_plugins/_security/api/internalusers/" + username, body);
+    }
+
+    public HttpResponse createRole(String roleName, ToXContentObject role) {
+        Objects.requireNonNull(roleName, "Role name is required");
+        Objects.requireNonNull(role, "Role is required");
+        return putJson("_plugins/_security/api/roles/" + roleName, role);
+    }
+
+    public HttpResponse createUser(String userName, ToXContentObject user) {
+        Objects.requireNonNull(userName, "User name is required");
+        Objects.requireNonNull(user, "User is required");
+        return putJson("_plugins/_security/api/internalusers/" + userName, user);
+    }
+
+    public HttpResponse executeRequest(HttpUriRequest uriRequest, Header... requestSpecificHeaders) {
+        try (CloseableHttpClient httpClient = getHTTPClient()) {
+
+            if (requestSpecificHeaders != null && requestSpecificHeaders.length > 0) {
+                for (int i = 0; i < requestSpecificHeaders.length; i++) {
+                    Header h = requestSpecificHeaders[i];
+                    uriRequest.addHeader(h);
+                }
+            }
+
+            for (Header header : headers) {
+                uriRequest.addHeader(header);
+            }
+
+            HttpResponse res = new HttpResponse(httpClient.execute(uriRequest));
+            log.debug(res.getBody());
+            return res;
+        } catch (IOException e) {
+            throw new RestClientException("Error occured during HTTP request execution", e);
+        }
+    }
+
+    public void createRoleMapping(String backendRoleName, String roleName) {
+        requireNonNull(backendRoleName, "Backend role name is required");
+        requireNonNull(roleName, "Role name is required");
+        String path = "_plugins/_security/api/rolesmapping/" + roleName;
+        String body = String.format("{\"backend_roles\": [\"%s\"]}", backendRoleName);
+        HttpResponse response = putJson(path, body);
+        response.assertStatusCode(201);
+    }
+
+    protected final String getHttpServerUri() {
+        return "http" + (enableHTTPClientSSL ? "s" : "") + "://" + nodeHttpAddress.getHostString() + ":" + nodeHttpAddress.getPort();
+    }
+
+    protected final CloseableHttpClient getHTTPClient() {
+        HttpRoutePlanner routePlanner = Optional.ofNullable(sourceInetAddress).map(LocalAddressRoutePlanner::new).orElse(null);
+        var factory = new CloseableHttpClientFactory(sslContext, requestConfig, routePlanner, null);
+        return factory.getHTTPClient();
+    }
+
+    private Header[] mergeHeaders(Header header, Header... headers) {
+
+        if (headers == null || headers.length == 0) {
+            return new Header[] { header };
+        } else {
+            Header[] result = new Header[headers.length + 1];
+            result[0] = header;
+            System.arraycopy(headers, 0, result, 1, headers.length);
+            return result;
+        }
+    }
+
+    public static class HttpResponse {
+        private final CloseableHttpResponse inner;
+        private final String body;
+        private final Header[] header;
+        private final int statusCode;
+        private final String statusReason;
+
+        public HttpResponse(CloseableHttpResponse inner) throws IllegalStateException, IOException {
+            super();
+            this.inner = inner;
+            final HttpEntity entity = inner.getEntity();
+            if (entity == null) { // head request does not have a entity
+                this.body = "";
+            } else {
+                this.body = IOUtils.toString(entity.getContent(), StandardCharsets.UTF_8);
+            }
+            this.header = inner.getAllHeaders();
+            this.statusCode = inner.getStatusLine().getStatusCode();
+            this.statusReason = inner.getStatusLine().getReasonPhrase();
+            inner.close();
+        }
+
+        public String getContentType() {
+            Header h = getInner().getFirstHeader("content-type");
+            if (h != null) {
+                return h.getValue();
+            }
+            return null;
+        }
+
+        public boolean isJsonContentType() {
+            String ct = getContentType();
+            if (ct == null) {
+                return false;
+            }
+            return ct.contains("application/json");
+        }
+
+        public CloseableHttpResponse getInner() {
+            return inner;
+        }
+
+        public String getBody() {
+            return body;
+        }
+
+        public Header[] getHeader() {
+            return header;
+        }
+
+        public Optional<Header> findHeader(String name) {
+            return Arrays.stream(header)
+                .filter(header -> requireNonNull(name, "Header name is mandatory.").equalsIgnoreCase(header.getName()))
+                .findFirst();
+        }
+
+        public Header getHeader(String name) {
+            return findHeader(name).orElseThrow();
+        }
+
+        public boolean containHeader(String name) {
+            return findHeader(name).isPresent();
+        }
+
+        public int getStatusCode() {
+            return statusCode;
+        }
+
+        public String getStatusReason() {
+            return statusReason;
+        }
+
+        public List<Header> getHeaders() {
+            return header == null ? Collections.emptyList() : Arrays.asList(header);
+        }
+
+        public String getTextFromJsonBody(String jsonPointer) {
+            return getJsonNodeAt(jsonPointer).asText();
+        }
+
+        public List<String> getTextArrayFromJsonBody(String jsonPointer) {
+            return StreamSupport.stream(getJsonNodeAt(jsonPointer).spliterator(), false)
+                .map(JsonNode::textValue)
+                .collect(Collectors.toList());
+        }
+
+        public int getIntFromJsonBody(String jsonPointer) {
+            return getJsonNodeAt(jsonPointer).asInt();
+        }
+
+        public Boolean getBooleanFromJsonBody(String jsonPointer) {
+            return getJsonNodeAt(jsonPointer).asBoolean();
+        }
+
+        public Double getDoubleFromJsonBody(String jsonPointer) {
+            return getJsonNodeAt(jsonPointer).asDouble();
+        }
+
+        public Long getLongFromJsonBody(String jsonPointer) {
+            return getJsonNodeAt(jsonPointer).asLong();
+        }
+
+        private JsonNode getJsonNodeAt(String jsonPointer) {
+            try {
+                return toJsonNode().at(jsonPointer);
+            } catch (IOException e) {
+                throw new IllegalArgumentException("Cound not convert response body to JSON node ", e);
+            }
+        }
+
+        private JsonNode toJsonNode() throws JsonProcessingException, IOException {
+            return DefaultObjectMapper.objectMapper.readTree(getBody());
+        }
+
+        @Override
+        public String toString() {
+            return "HttpResponse [inner="
+                + inner
+                + ", body="
+                + body
+                + ", header="
+                + Arrays.toString(header)
+                + ", statusCode="
+                + statusCode
+                + ", statusReason="
+                + statusReason
+                + "]";
+        }
+
+        public <T> T getBodyAs(Class<T> authInfoClass) {
+            try {
+                return DefaultObjectMapper.readValue(getBody(), authInfoClass);
+            } catch (IOException e) {
+                throw new RuntimeException("Cannot parse response body", e);
+            }
+        }
+
+        public void assertStatusCode(int expectedHttpStatus) {
+            String reason = format("Expected status code is '%d', but was '%d'. Response body '%s'.", expectedHttpStatus, statusCode, body);
+            assertThat(reason, statusCode, equalTo(expectedHttpStatus));
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "TestRestClient [server=" + getHttpServerUri() + ", node=" + nodeHttpAddress + "]";
+    }
+
+    public RequestConfig getRequestConfig() {
+        return requestConfig;
+    }
+
+    public void setRequestConfig(RequestConfig requestConfig) {
+        this.requestConfig = requestConfig;
+    }
+
+    public boolean isSendHTTPClientCertificate() {
+        return sendHTTPClientCertificate;
+    }
+
+    public void setSendHTTPClientCertificate(boolean sendHTTPClientCertificate) {
+        this.sendHTTPClientCertificate = sendHTTPClientCertificate;
+    }
+
+    @Override
+    public void close() {
+        // TODO: Is there anything to clean up here?
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClientConfiguration.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClientConfiguration.java
new file mode 100644
index 0000000000..02028a5432
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/TestRestClientConfiguration.java
@@ -0,0 +1,174 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.cluster;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
+
+import org.opensearch.test.framework.cluster.OpenSearchClientProvider.UserCredentialsHolder;
+
+import static java.util.Objects.requireNonNull;
+
+/**
+* Object which groups some parameters needed for {@link TestRestClient} creation. The class was created to reduce number of parameters
+* of methods which are used to create {@link TestRestClient} . The class provides convenient builder-like methods. All fields of a class
+* are nullable.
+*/
+public class TestRestClientConfiguration {
+
+    /**
+    * Username
+    */
+    private String username;
+    /**
+    * Password
+    */
+    private String password;
+    /**
+    * HTTP headers which should be attached to each HTTP request which is sent by {@link TestRestClient}
+    */
+    private final List<Header> headers = new ArrayList<>();
+    /**
+    * IP address of client socket of {@link TestRestClient}
+    */
+    private InetAddress sourceInetAddress;
+
+    /**
+    * Set username
+    * @param username username
+    * @return builder
+    */
+    public TestRestClientConfiguration username(String username) {
+        this.username = username;
+        return this;
+    }
+
+    /**
+    * Set user's password
+    * @param password password
+    * @return builder
+    */
+    public TestRestClientConfiguration password(String password) {
+        this.password = password;
+        return this;
+    }
+
+    /**
+    * The method sets username and password read form <code>userCredentialsHolder</code>
+    * @param userCredentialsHolder source of credentials
+    * @return builder
+    */
+    public TestRestClientConfiguration credentials(UserCredentialsHolder userCredentialsHolder) {
+        Objects.requireNonNull(userCredentialsHolder, "User credential holder is required.");
+        this.username = userCredentialsHolder.getName();
+        this.password = userCredentialsHolder.getPassword();
+        return this;
+    }
+
+    /**
+    * Add HTTP headers which are attached to each HTTP request
+    * @param headers headers
+    * @return builder
+    */
+    public TestRestClientConfiguration header(final String headerName, final String headerValue) {
+        this.headers.add(
+            new BasicHeader(
+                Objects.requireNonNull(headerName, "Header names are required"),
+                Objects.requireNonNull(headerValue, "Header values are required")
+            )
+        );
+        return this;
+    }
+
+    /**
+    * Add HTTP headers which are attached to each HTTP request
+    * @param headers headers
+    * @return builder
+    */
+    public TestRestClientConfiguration headers(Header... headers) {
+        this.headers.addAll(Arrays.asList(Objects.requireNonNull(headers, "Headers are required")));
+        return this;
+    }
+
+    /**
+    * Add HTTP headers which are attached to each HTTP request
+    * @param headers list of headers
+    * @return builder
+    */
+    public TestRestClientConfiguration headers(List<Header> headers) {
+        this.headers.addAll(Objects.requireNonNull(headers, "Cannot add null headers"));
+        return this;
+    }
+
+    /**
+    * Set IP address of client socket used by {@link TestRestClient}
+    * @param sourceInetAddress IP address
+    * @return builder
+    */
+    public TestRestClientConfiguration sourceInetAddress(InetAddress sourceInetAddress) {
+        this.sourceInetAddress = sourceInetAddress;
+        return this;
+    }
+
+    public TestRestClientConfiguration sourceInetAddress(String sourceInetAddress) {
+        try {
+            this.sourceInetAddress = InetAddress.getByName(sourceInetAddress);
+            return this;
+        } catch (UnknownHostException e) {
+            throw new RuntimeException("Cannot get IP address for string " + sourceInetAddress, e);
+        }
+    }
+
+    public static TestRestClientConfiguration userWithSourceIp(UserCredentialsHolder credentials, String sourceIpAddress) {
+        return new TestRestClientConfiguration().credentials(credentials).sourceInetAddress(sourceIpAddress);
+    }
+
+    /**
+    * Return complete header list. Basic authentication header is created using fields {@link #username} and {@link #password}
+    * @return header list
+    */
+    List<Header> getHeaders() {
+        return Stream.concat(createBasicAuthHeader().stream(), headers.stream()).collect(Collectors.toList());
+    }
+
+    private Optional<Header> createBasicAuthHeader() {
+        if (containsCredentials()) {
+            return Optional.of(getBasicAuthHeader(username, password));
+        }
+        return Optional.empty();
+    }
+
+    private boolean containsCredentials() {
+        return StringUtils.isNoneBlank(username) && StringUtils.isNoneBlank(password);
+    }
+
+    InetAddress getSourceInetAddress() {
+        return sourceInetAddress;
+    }
+
+    public static Header getBasicAuthHeader(String user, String password) {
+        String value = "Basic "
+            + Base64.getEncoder().encodeToString((user + ":" + requireNonNull(password)).getBytes(StandardCharsets.UTF_8));
+        return new BasicHeader("Authorization", value);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/EmbeddedLDAPServer.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/EmbeddedLDAPServer.java
new file mode 100755
index 0000000000..583a0cdaeb
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/EmbeddedLDAPServer.java
@@ -0,0 +1,56 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.ldap;
+
+import java.util.Objects;
+
+import org.junit.rules.ExternalResource;
+
+import org.opensearch.test.framework.certificate.CertificateData;
+
+public class EmbeddedLDAPServer extends ExternalResource {
+
+    private final LdapServer server;
+
+    private final LdifData ldifData;
+
+    public EmbeddedLDAPServer(CertificateData trustAnchor, CertificateData ldapCertificate, LdifData ldifData) {
+        this.ldifData = Objects.requireNonNull(ldifData, "Ldif data is required");
+        this.server = new LdapServer(trustAnchor, ldapCertificate);
+    }
+
+    @Override
+    protected void before() {
+        try {
+            server.start(ldifData);
+        } catch (Exception e) {
+            throw new RuntimeException("Cannot start ldap server", e);
+        }
+    }
+
+    @Override
+    protected void after() {
+        try {
+            server.stop();
+        } catch (InterruptedException e) {
+            throw new RuntimeException("Cannot stop LDAP server.", e);
+        }
+    }
+
+    public int getLdapNonTlsPort() {
+        return server.getLdapNonTlsPort();
+    }
+
+    public int getLdapTlsPort() {
+        return server.getLdapsTlsPort();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/LdapServer.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdapServer.java
new file mode 100644
index 0000000000..18a14242cc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdapServer.java
@@ -0,0 +1,226 @@
+/*
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+* Modifications Copyright OpenSearch Contributors. See
+* GitHub history for details.
+*/
+
+package org.opensearch.test.framework.ldap;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.BindException;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.UnrecoverableKeyException;
+import java.security.cert.Certificate;
+import java.security.cert.CertificateException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+
+import com.unboundid.ldap.listener.InMemoryDirectoryServer;
+import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig;
+import com.unboundid.ldap.listener.InMemoryListenerConfig;
+import com.unboundid.ldap.sdk.DN;
+import com.unboundid.ldap.sdk.Entry;
+import com.unboundid.ldap.sdk.LDAPException;
+import com.unboundid.ldap.sdk.schema.Schema;
+import com.unboundid.ldif.LDIFReader;
+import com.unboundid.util.ssl.SSLUtil;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import org.opensearch.test.framework.certificate.CertificateData;
+import org.opensearch.test.framework.cluster.SocketUtils;
+
+/**
+* Based on class com.amazon.dlic.auth.ldap.srv.LdapServer from older tests
+*/
+final class LdapServer {
+    private static final Logger log = LogManager.getLogger(LdapServer.class);
+
+    private static final int LOCK_TIMEOUT = 60;
+    private static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;
+
+    private static final String LOCK_TIMEOUT_MSG = "Unable to obtain lock due to timeout after "
+        + LOCK_TIMEOUT
+        + " "
+        + TIME_UNIT.toString();
+    private static final String SERVER_NOT_STARTED = "The LDAP server is not started.";
+    private static final String SERVER_ALREADY_STARTED = "The LDAP server is already started.";
+
+    private final CertificateData trustAnchor;
+
+    private final CertificateData ldapCertificate;
+
+    private InMemoryDirectoryServer server;
+    private final AtomicBoolean isStarted = new AtomicBoolean(Boolean.FALSE);
+    private final ReentrantLock serverStateLock = new ReentrantLock();
+
+    private int ldapNonTlsPort = -1;
+    private int ldapTlsPort = -1;
+
+    public LdapServer(CertificateData trustAnchor, CertificateData ldapCertificate) {
+        this.trustAnchor = trustAnchor;
+        this.ldapCertificate = ldapCertificate;
+    }
+
+    public boolean isStarted() {
+        return this.isStarted.get();
+    }
+
+    public int getLdapNonTlsPort() {
+        return ldapNonTlsPort;
+    }
+
+    public int getLdapsTlsPort() {
+        return ldapTlsPort;
+    }
+
+    public void start(LdifData ldifData) throws Exception {
+        Objects.requireNonNull(ldifData, "Ldif data is required");
+        boolean hasLock = false;
+        try {
+            hasLock = serverStateLock.tryLock(LdapServer.LOCK_TIMEOUT, LdapServer.TIME_UNIT);
+            if (hasLock) {
+                doStart(ldifData);
+                this.isStarted.set(Boolean.TRUE);
+            } else {
+                throw new IllegalStateException(LdapServer.LOCK_TIMEOUT_MSG);
+            }
+        } catch (InterruptedException ioe) {
+            // lock interrupted
+            log.error("LDAP server start lock interrupted", ioe);
+            throw ioe;
+        } finally {
+            if (hasLock) {
+                serverStateLock.unlock();
+            }
+        }
+    }
+
+    private void doStart(LdifData ldifData) throws Exception {
+        if (isStarted.get()) {
+            throw new IllegalStateException(LdapServer.SERVER_ALREADY_STARTED);
+        }
+        configureAndStartServer(ldifData);
+    }
+
+    private Collection<InMemoryListenerConfig> getInMemoryListenerConfigs() throws Exception {
+        KeyStore keyStore = createEmptyKeyStore();
+        addLdapCertificatesToKeystore(keyStore);
+        final SSLUtil sslUtil = new SSLUtil(createKeyManager(keyStore), createTrustManagers(keyStore));
+
+        ldapNonTlsPort = SocketUtils.findAvailableTcpPort();
+        ldapTlsPort = SocketUtils.findAvailableTcpPort();
+
+        Collection<InMemoryListenerConfig> listenerConfigs = new ArrayList<>();
+        listenerConfigs.add(InMemoryListenerConfig.createLDAPConfig("ldap", null, ldapNonTlsPort, sslUtil.createSSLSocketFactory()));
+        listenerConfigs.add(InMemoryListenerConfig.createLDAPSConfig("ldaps", ldapTlsPort, sslUtil.createSSLServerSocketFactory()));
+        return listenerConfigs;
+    }
+
+    private static KeyManager[] createKeyManager(KeyStore keyStore) throws NoSuchAlgorithmException, KeyStoreException,
+        UnrecoverableKeyException {
+        KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+        keyManagerFactory.init(keyStore, null);
+        return keyManagerFactory.getKeyManagers();
+    }
+
+    private static TrustManager[] createTrustManagers(KeyStore keyStore) throws NoSuchAlgorithmException, KeyStoreException {
+        TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+        trustManagerFactory.init(keyStore);
+        return trustManagerFactory.getTrustManagers();
+    }
+
+    private void addLdapCertificatesToKeystore(KeyStore keyStore) throws KeyStoreException {
+        keyStore.setCertificateEntry("trustAnchor", trustAnchor.certificate());
+        keyStore.setKeyEntry("ldap-key", ldapCertificate.getKey(), null, new Certificate[] { ldapCertificate.certificate() });
+    }
+
+    private static KeyStore createEmptyKeyStore() throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
+        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
+        keyStore.load(null);
+        return keyStore;
+    }
+
+    private synchronized void configureAndStartServer(LdifData ldifData) throws Exception {
+        Collection<InMemoryListenerConfig> listenerConfigs = getInMemoryListenerConfigs();
+
+        Schema schema = Schema.getDefaultStandardSchema();
+
+        final String rootObjectDN = ldifData.getRootDistinguishedName();
+        InMemoryDirectoryServerConfig config = new InMemoryDirectoryServerConfig(new DN(rootObjectDN));
+
+        config.setSchema(schema);  // schema can be set on the rootDN too, per javadoc.
+        config.setListenerConfigs(listenerConfigs);
+        config.setEnforceAttributeSyntaxCompliance(false);
+        config.setEnforceSingleStructuralObjectClass(false);
+
+        server = new InMemoryDirectoryServer(config);
+
+        try {
+            /* Clear entries from server. */
+            server.clear();
+            server.startListening();
+            loadLdifData(ldifData);
+        } catch (LDAPException ldape) {
+            if (ldape.getMessage().contains("java.net.BindException")) {
+                throw new BindException(ldape.getMessage());
+            }
+            throw ldape;
+        }
+
+    }
+
+    public void stop() throws InterruptedException {
+        boolean hasLock = false;
+        try {
+            hasLock = serverStateLock.tryLock(LdapServer.LOCK_TIMEOUT, LdapServer.TIME_UNIT);
+            if (hasLock) {
+                if (!isStarted.get()) {
+                    throw new IllegalStateException(LdapServer.SERVER_NOT_STARTED);
+                }
+                log.info("Shutting down in-Memory Ldap Server.");
+                server.shutDown(true);
+            } else {
+                throw new IllegalStateException(LdapServer.LOCK_TIMEOUT_MSG);
+            }
+        } catch (InterruptedException ioe) {
+            // lock interrupted
+            log.error("Canot stop LDAP server due to interruption", ioe);
+            throw ioe;
+        } finally {
+            if (hasLock) {
+                serverStateLock.unlock();
+            }
+        }
+    }
+
+    private void loadLdifData(LdifData ldifData) throws Exception {
+        try (LDIFReader r = new LDIFReader(new BufferedReader(new StringReader(ldifData.getContent())))) {
+            Entry entry;
+            while ((entry = r.readEntry()) != null) {
+                server.add(entry);
+            }
+        } catch (Exception e) {
+            log.error("Cannot load data into LDAP server", e);
+            throw e;
+        }
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifBuilder.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifBuilder.java
new file mode 100644
index 0000000000..87f01a2bbc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifBuilder.java
@@ -0,0 +1,66 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.ldap;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class LdifBuilder {
+
+    private static final Logger log = LogManager.getLogger(LdifBuilder.class);
+
+    private final List<Record> records;
+
+    private Record root;
+
+    public LdifBuilder() {
+        this.records = new ArrayList<>();
+    }
+
+    public RecordBuilder root(String distinguishedName) {
+        if (root != null) {
+            throw new IllegalStateException("Root object is already defined");
+        }
+        return new RecordBuilder(this, distinguishedName);
+    }
+
+    RecordBuilder newRecord(String distinguishedName) {
+        if (root == null) {
+            throw new IllegalStateException("Define root object first");
+        }
+        return new RecordBuilder(this, distinguishedName);
+    }
+
+    void addRecord(Record record) {
+        Objects.requireNonNull(record, "Cannot add null record");
+        if (records.isEmpty()) {
+            this.root = record;
+        }
+        records.add(Objects.requireNonNull(record, "Cannot add null record"));
+    }
+
+    public LdifData buildLdif() {
+        String ldif = records.stream().map(record -> record.toLdifRepresentation()).collect(Collectors.joining("\n##########\n"));
+        log.debug("Built ldif file: \n{}", ldif);
+        return new LdifData(getRootDistinguishedName(), ldif);
+    }
+
+    private String getRootDistinguishedName() {
+        if (root == null) {
+            throw new IllegalStateException("Root object is not present.");
+        }
+        return root.getDistinguishedName();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifData.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifData.java
new file mode 100644
index 0000000000..4a1af5bc08
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/LdifData.java
@@ -0,0 +1,48 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.ldap;
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+* Value object which represents LDIF file data and some metadata. Ensure type safety.
+*/
+public class LdifData {
+
+    private final String rootDistinguishedName;
+
+    private final String content;
+
+    LdifData(String rootDistinguishedName, String content) {
+        this.rootDistinguishedName = requireNotBlank(rootDistinguishedName, "Root distinguished name is required");
+        this.content = requireNotBlank(content, "Ldif file content is required");
+
+    }
+
+    private static String requireNotBlank(String string, String message) {
+        if (StringUtils.isBlank(string)) {
+            throw new IllegalArgumentException(message);
+        }
+        return string;
+    }
+
+    String getContent() {
+        return content;
+    }
+
+    String getRootDistinguishedName() {
+        return rootDistinguishedName;
+    }
+
+    @Override
+    public String toString() {
+        return "LdifData{" + "content='" + content + '\'' + '}';
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/Record.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/Record.java
new file mode 100644
index 0000000000..48e7484777
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/Record.java
@@ -0,0 +1,67 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.ldap;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.tuple.Pair;
+
+class Record {
+
+    private final String distinguishedName;
+
+    private final List<String> classes;
+    private final List<Pair<String, String>> attributes;
+
+    public Record(String distinguishedName) {
+        this.distinguishedName = Objects.requireNonNull(distinguishedName, "Distinguished name is required");
+        this.classes = new ArrayList<>();
+        this.attributes = new ArrayList<>();
+    }
+
+    public String getDistinguishedName() {
+        return distinguishedName;
+    }
+
+    public void addClass(String clazz) {
+        classes.add(Objects.requireNonNull(clazz, "Object class is required."));
+    }
+
+    public void addAttribute(String name, String value) {
+        Objects.requireNonNull(name, "Attribute name is required");
+        Objects.requireNonNull(value, "Attribute value is required");
+        attributes.add(Pair.of(name, value));
+    }
+
+    boolean isValid() {
+        return classes.size() > 0;
+    }
+
+    String toLdifRepresentation() {
+        return new StringBuilder("dn: ").append(distinguishedName)
+            .append("\n")
+            .append(formattedClasses())
+            .append("\n")
+            .append(formattedAttributes())
+            .append("\n")
+            .toString();
+    }
+
+    private String formattedAttributes() {
+        return attributes.stream().map(pair -> pair.getKey() + ": " + pair.getValue()).collect(Collectors.joining("\n"));
+    }
+
+    private String formattedClasses() {
+        return classes.stream().map(clazz -> "objectClass: " + clazz).collect(Collectors.joining("\n"));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/ldap/RecordBuilder.java b/src/integrationTest/java/org/opensearch/test/framework/ldap/RecordBuilder.java
new file mode 100644
index 0000000000..1df27c72fe
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/ldap/RecordBuilder.java
@@ -0,0 +1,92 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.ldap;
+
+import java.util.Objects;
+
+public class RecordBuilder {
+
+    private final LdifBuilder builder;
+    private final Record record;
+
+    RecordBuilder(LdifBuilder builder, String distinguishedName) {
+        this.builder = Objects.requireNonNull(builder, "LdifBuilder is required");
+        this.record = new Record(distinguishedName);
+    }
+
+    public RecordBuilder classes(String... classes) {
+        for (String clazz : classes) {
+            this.record.addClass(clazz);
+        }
+        return this;
+    }
+
+    public RecordBuilder dn(String distinguishedName) {
+        record.addAttribute("dn", distinguishedName);
+        return this;
+    }
+
+    public RecordBuilder dc(String domainComponent) {
+        record.addAttribute("dc", domainComponent);
+        return this;
+    }
+
+    public RecordBuilder ou(String organizationUnit) {
+        record.addAttribute("ou", organizationUnit);
+        return this;
+    }
+
+    public RecordBuilder cn(String commonName) {
+        record.addAttribute("cn", commonName);
+        return this;
+    }
+
+    public RecordBuilder sn(String surname) {
+        record.addAttribute("sn", surname);
+        return this;
+    }
+
+    public RecordBuilder uid(String userId) {
+        record.addAttribute("uid", userId);
+        return this;
+    }
+
+    public RecordBuilder userPassword(String password) {
+        record.addAttribute("userpassword", password);
+        return this;
+    }
+
+    public RecordBuilder mail(String emailAddress) {
+        record.addAttribute("mail", emailAddress);
+        return this;
+    }
+
+    public RecordBuilder uniqueMember(String userDistinguishedName) {
+        record.addAttribute("uniquemember", userDistinguishedName);
+        return this;
+    }
+
+    public RecordBuilder attribute(String name, String value) {
+        record.addAttribute(name, value);
+        return this;
+    }
+
+    public LdifBuilder buildRecord() {
+        if (record.isValid() == false) {
+            throw new IllegalStateException("Record is invalid");
+        }
+        builder.addRecord(record);
+        return builder;
+    }
+
+    public RecordBuilder newRecord(String distinguishedName) {
+        return buildRecord().newRecord(distinguishedName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/log/LogCapturingAppender.java b/src/integrationTest/java/org/opensearch/test/framework/log/LogCapturingAppender.java
new file mode 100644
index 0000000000..5673f1bd3e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/log/LogCapturingAppender.java
@@ -0,0 +1,134 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.log;
+
+import com.google.common.collect.EvictingQueue;
+import com.google.common.collect.Queues;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.Core;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Property;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+
+import static org.opensearch.test.framework.log.LogCapturingAppender.PLUGIN_NAME;
+
+/**
+* <p>The class acts as Log4j2 appender with a special purpose. The appender is used to capture logs which are generated during tests and
+* then test can examine logs. To use the appender it is necessary to:</p>
+* <ol>
+*     <li>Add package with appender to log4j2 package scan in Log4j2 configuration file</li>
+*     <li>Create appender in log4j2 configuration</li>
+*     <li>Assign required loggers to appender</li>
+*     <li>Enable appender for certain classes with method {@link #enable(String...)}. Each test can enable appender for distinct classes</li>
+* </ol>
+*/
+@Plugin(name = PLUGIN_NAME, category = Core.CATEGORY_NAME, elementType = Appender.ELEMENT_TYPE, printObject = true)
+public class LogCapturingAppender extends AbstractAppender {
+
+    public final static String PLUGIN_NAME = "LogCapturingAppender";
+    /**
+    * Appender stores only last <code>MAX_SIZE</code> messages to avoid excessive RAM memory usage.
+    */
+    public static final int MAX_SIZE = 100;
+
+    /**
+    * Buffer for captured log messages
+    */
+    private static final Queue<LogMessage> messages = Queues.synchronizedQueue(EvictingQueue.create(MAX_SIZE));
+
+    /**
+    * Log messages are stored in buffer {@link #messages} only for classes which are added to the {@link #activeLoggers} set.
+    */
+    private static final Set<String> activeLoggers = ConcurrentHashMap.newKeySet();
+
+    protected LogCapturingAppender(
+        String name,
+        Filter filter,
+        Layout<? extends Serializable> layout,
+        boolean ignoreExceptions,
+        Property[] properties
+    ) {
+        super(name, filter, layout, ignoreExceptions, properties);
+    }
+
+    /**
+    * Method used by Log4j2 to create appender
+    * @param name appender name from Log4j2 configuration
+    * @return newly created appender
+    */
+    @PluginFactory
+    public static LogCapturingAppender createAppender(
+        @PluginAttribute(value = "name", defaultString = "logCapturingAppender") String name
+    ) {
+        return new LogCapturingAppender(name, null, null, true, Property.EMPTY_ARRAY);
+    }
+
+    /**
+    * Method invoked by Log4j2 to append log events
+    * @param event The LogEvent, represents log message.
+    */
+    @Override
+    public void append(LogEvent event) {
+        String loggerName = event.getLoggerName();
+        boolean loggable = activeLoggers.contains(loggerName);
+        if (loggable) {
+            event.getThrown();
+            messages.add(new LogMessage(event.getMessage().getFormattedMessage(), event.getThrown()));
+        }
+    }
+
+    /**
+    * To collect log messages form given logger the logger name must be passed to {@link #enable(String...)} method.
+    * @param loggerNames logger names
+    */
+    public static void enable(String... loggerNames) {
+        disable();
+        activeLoggers.addAll(Arrays.asList(loggerNames));
+    }
+
+    /**
+    * Invocation cause that appender stops collecting log messages. Additionally, memory used by collected messages so far is released.
+    */
+    public static void disable() {
+        activeLoggers.clear();
+        messages.clear();
+    }
+
+    /**
+    * Is used to obtain gathered log messages
+    * @return Log messages
+    */
+    public static List<LogMessage> getLogMessages() {
+        return new ArrayList<>(messages);
+    }
+
+    public static List<String> getLogMessagesAsString() {
+        return getLogMessages().stream().map(LogMessage::getMessage).collect(Collectors.toList());
+    }
+
+    @Override
+    public String toString() {
+        return "LogCapturingAppender{}";
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/log/LogMessage.java b/src/integrationTest/java/org/opensearch/test/framework/log/LogMessage.java
new file mode 100644
index 0000000000..9342c7ee30
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/log/LogMessage.java
@@ -0,0 +1,40 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.log;
+
+import java.util.Objects;
+import java.util.Optional;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+
+class LogMessage {
+
+    private final String message;
+    private final String stackTrace;
+
+    public LogMessage(String message, Throwable throwable) {
+        this.message = message;
+        this.stackTrace = Optional.ofNullable(throwable).map(ExceptionUtils::getStackTrace).orElse("");
+    }
+
+    public boolean containMessage(String expectedMessage) {
+        Objects.requireNonNull(expectedMessage, "Expected message must not be null.");
+        return expectedMessage.equals(message);
+    }
+
+    public boolean stackTraceContains(String stackTraceFragment) {
+        Objects.requireNonNull(stackTraceFragment, "Stack trace fragment is required.");
+        return stackTrace.contains(stackTraceFragment);
+    }
+
+    public String getMessage() {
+        return message;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/log/LogsRule.java b/src/integrationTest/java/org/opensearch/test/framework/log/LogsRule.java
new file mode 100644
index 0000000000..46fa252df4
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/log/LogsRule.java
@@ -0,0 +1,93 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.log;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import org.junit.rules.ExternalResource;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasItem;
+
+/**
+* The class is a JUnit 4 rule and enables developers to write assertion related to log messages generated in the course of test. To use
+* {@link LogsRule} appender  {@link LogCapturingAppender} must be properly configured. The rule also manages {@link LogCapturingAppender}
+* so that memory occupied by gathered log messages is released after each test.
+*/
+public class LogsRule extends ExternalResource {
+
+    private final String[] loggerNames;
+
+    /**
+    * Constructor used to start gathering log messages from certain loggers
+    * @param loggerNames Loggers names. Log messages are collected only if the log message is associated with the logger with a name which
+    *                    is present in <code>loggerNames</code> parameter.
+    */
+    public LogsRule(String... loggerNames) {
+        this.loggerNames = Objects.requireNonNull(loggerNames, "Logger names are required");
+    }
+
+    @Override
+    protected void before() {
+        LogCapturingAppender.enable(loggerNames);
+    }
+
+    @Override
+    protected void after() {
+        LogCapturingAppender.disable();
+    }
+
+    /**
+    * Check if during the tests certain log message was logged
+    * @param expectedLogMessage expected log message
+    */
+    public void assertThatContainExactly(String expectedLogMessage) {
+        List<String> messages = LogCapturingAppender.getLogMessagesAsString();
+        String reason = reasonMessage(expectedLogMessage, messages);
+        assertThat(reason, messages, hasItem(expectedLogMessage));
+    }
+
+    /**
+    * Check if during the tests certain log message was logged
+    * @param messageFragment expected log message fragment
+    */
+    public void assertThatContain(String messageFragment) {
+        List<String> messages = LogCapturingAppender.getLogMessagesAsString();
+        ;
+        String reason = reasonMessage(messageFragment, messages);
+        assertThat(reason, messages, hasItem(containsString(messageFragment)));
+    }
+
+    /**
+    * Check if during the tests a stack trace was logged which contain given fragment
+    * @param stackTraceFragment stack trace fragment
+    */
+    public void assertThatStackTraceContain(String stackTraceFragment) {
+        long count = LogCapturingAppender.getLogMessages()
+            .stream()
+            .filter(logMessage -> logMessage.stackTraceContains(stackTraceFragment))
+            .count();
+        String reason = "Stack trace does not contain element " + stackTraceFragment;
+        assertThat(reason, count, greaterThan(0L));
+    }
+
+    private static String reasonMessage(String expectedLogMessage, List<String> messages) {
+        String concatenatedLogMessages = messages.stream().map(message -> String.format("'%s'", message)).collect(Collectors.joining(", "));
+        return String.format(
+            "Expected message '%s' has not been found in logs. All captured log messages: %s",
+            expectedLogMessage,
+            concatenatedLogMessages
+        );
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/AliasExistsMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/AliasExistsMatcher.java
new file mode 100644
index 0000000000..42723168ff
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/AliasExistsMatcher.java
@@ -0,0 +1,66 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
+import java.util.stream.StreamSupport;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest;
+import org.opensearch.action.admin.indices.alias.get.GetAliasesResponse;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.AliasMetadata;
+
+import static java.util.Objects.requireNonNull;
+import static java.util.Spliterator.IMMUTABLE;
+import static java.util.Spliterators.spliteratorUnknownSize;
+
+class AliasExistsMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private final String aliasName;
+
+    public AliasExistsMatcher(String aliasName) {
+        this.aliasName = requireNonNull(aliasName, "Alias name is required");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            GetAliasesResponse response = client.admin().indices().getAliases(new GetAliasesRequest(aliasName)).get();
+
+            Map<String, List<AliasMetadata>> aliases = response.getAliases();
+            Set<String> actualAliasNames = StreamSupport.stream(spliteratorUnknownSize(aliases.values().iterator(), IMMUTABLE), false)
+                .flatMap(Collection::stream)
+                .map(AliasMetadata::getAlias)
+                .collect(Collectors.toSet());
+            if (actualAliasNames.contains(aliasName) == false) {
+                String existingAliases = String.join(", ", actualAliasNames);
+                mismatchDescription.appendText(" alias does not exist, defined aliases ").appendValue(existingAliases);
+                return false;
+            }
+            return true;
+        } catch (InterruptedException | ExecutionException e) {
+            mismatchDescription.appendText("Error occurred during checking if cluster contains alias ").appendValue(e);
+            return false;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Cluster should contain ").appendValue(aliasName).appendText(" alias");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/AtLeastCertainNumberOfAuditsFulfillPredicateMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/AtLeastCertainNumberOfAuditsFulfillPredicateMatcher.java
new file mode 100644
index 0000000000..ba7feed4c3
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/AtLeastCertainNumberOfAuditsFulfillPredicateMatcher.java
@@ -0,0 +1,48 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.hamcrest.Description;
+
+import org.opensearch.security.auditlog.impl.AuditMessage;
+
+class AtLeastCertainNumberOfAuditsFulfillPredicateMatcher extends AuditsFulfillPredicateMatcher {
+
+    private final long minimumNumberOfAudits;
+
+    public AtLeastCertainNumberOfAuditsFulfillPredicateMatcher(Predicate<AuditMessage> predicate, long minimumNumberOfAudits) {
+        super(predicate);
+        this.minimumNumberOfAudits = minimumNumberOfAudits;
+    }
+
+    @Override
+    protected boolean matchesSafely(List<AuditMessage> audits, Description mismatchDescription) {
+        long count = countAuditsWhichMatchPredicate(audits);
+        if (count < minimumNumberOfAudits) {
+            mismatchDescription.appendText(" only ")
+                .appendValue(count)
+                .appendText(" match predicate. Examined audit logs ")
+                .appendText(auditMessagesToString(audits));
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Al least ")
+            .appendValue(minimumNumberOfAudits)
+            .appendText(" audits records should match predicate ")
+            .appendValue(predicate);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditMessageMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditMessageMatchers.java
new file mode 100644
index 0000000000..080d4473e1
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditMessageMatchers.java
@@ -0,0 +1,38 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.security.auditlog.impl.AuditMessage;
+
+public class AuditMessageMatchers {
+
+    private AuditMessageMatchers() {
+
+    }
+
+    public static Matcher<List<AuditMessage>> exactNumberOfAuditsFulfillPredicate(
+        long exactNumberOfAuditMessages,
+        Predicate<AuditMessage> predicate
+    ) {
+        return new ExactNumberOfAuditsFulfillPredicateMatcher(exactNumberOfAuditMessages, predicate);
+    }
+
+    public static Matcher<List<AuditMessage>> atLeastCertainNumberOfAuditsFulfillPredicate(
+        long minimumNumberOfAudits,
+        Predicate<AuditMessage> predicate
+    ) {
+        return new AtLeastCertainNumberOfAuditsFulfillPredicateMatcher(predicate, minimumNumberOfAudits);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditsFulfillPredicateMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditsFulfillPredicateMatcher.java
new file mode 100644
index 0000000000..2864c1df81
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/AuditsFulfillPredicateMatcher.java
@@ -0,0 +1,36 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.List;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.security.auditlog.impl.AuditMessage;
+
+abstract class AuditsFulfillPredicateMatcher extends TypeSafeDiagnosingMatcher<List<AuditMessage>> {
+
+    protected final Predicate<AuditMessage> predicate;
+
+    public AuditsFulfillPredicateMatcher(Predicate<AuditMessage> predicate) {
+        this.predicate = predicate;
+    }
+
+    protected String auditMessagesToString(List<AuditMessage> audits) {
+        return audits.stream().map(AuditMessage::toString).collect(Collectors.joining(",\n"));
+    }
+
+    protected long countAuditsWhichMatchPredicate(List<AuditMessage> audits) {
+        return audits.stream().filter(predicate).count();
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsAtIndexMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsAtIndexMatcher.java
new file mode 100644
index 0000000000..2fb2cc5e74
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsAtIndexMatcher.java
@@ -0,0 +1,76 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.bulk.BulkItemResponse;
+import org.opensearch.action.bulk.BulkResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class BulkResponseContainExceptionsAtIndexMatcher extends TypeSafeDiagnosingMatcher<BulkResponse> {
+
+    private final int errorIndex;
+    private final Matcher<Throwable> exceptionMatcher;
+
+    public BulkResponseContainExceptionsAtIndexMatcher(int errorIndex, Matcher<Throwable> exceptionMatcher) {
+        this.errorIndex = errorIndex;
+        this.exceptionMatcher = requireNonNull(exceptionMatcher, "Exception matcher is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(BulkResponse response, Description mismatchDescription) {
+        if (response.hasFailures() == false) {
+            mismatchDescription.appendText("received successful bulk response what is not expected.");
+            return false;
+        }
+        BulkItemResponse[] items = response.getItems();
+        if ((items == null) || (items.length == 0) || (errorIndex >= items.length)) {
+            mismatchDescription.appendText("bulk response does not contain item with index ").appendValue(errorIndex);
+            return false;
+        }
+        BulkItemResponse item = items[errorIndex];
+        if (item == null) {
+            mismatchDescription.appendText("bulk item response with index ").appendValue(errorIndex).appendText(" is null.");
+            return false;
+        }
+        BulkItemResponse.Failure failure = item.getFailure();
+        if (failure == null) {
+            mismatchDescription.appendText("bulk response item with index ")
+                .appendValue(errorIndex)
+                .appendText(" does not contain failure");
+            return false;
+        }
+        Exception exception = failure.getCause();
+        if (exception == null) {
+            mismatchDescription.appendText("bulk response item with index ")
+                .appendValue(errorIndex)
+                .appendText(" does not contain exception.");
+            return false;
+        }
+        if (exceptionMatcher.matches(exception) == false) {
+            mismatchDescription.appendText("bulk response item with index ")
+                .appendValue(errorIndex)
+                .appendText(" contains incorrect exception which is ")
+                .appendValue(exception);
+            return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("bulk response should contain exceptions which indicate failure");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsMatcher.java
new file mode 100644
index 0000000000..b5e46ba9e2
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseContainExceptionsMatcher.java
@@ -0,0 +1,71 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.bulk.BulkItemResponse;
+import org.opensearch.action.bulk.BulkResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class BulkResponseContainExceptionsMatcher extends TypeSafeDiagnosingMatcher<BulkResponse> {
+
+    private final Matcher<Throwable> exceptionMatcher;
+
+    public BulkResponseContainExceptionsMatcher(Matcher<Throwable> exceptionMatcher) {
+        this.exceptionMatcher = requireNonNull(exceptionMatcher, "Exception matcher is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(BulkResponse response, Description mismatchDescription) {
+        if (response.hasFailures() == false) {
+            mismatchDescription.appendText("received successful bulk response what is not expected.");
+            return false;
+        }
+        BulkItemResponse[] items = response.getItems();
+        if ((items == null) || (items.length == 0)) {
+            mismatchDescription.appendText("bulk response does not contain items ").appendValue(items);
+            return false;
+        }
+        for (int i = 0; i < items.length; ++i) {
+            BulkItemResponse item = items[i];
+            if (item == null) {
+                mismatchDescription.appendText("bulk item response with index ").appendValue(i).appendText(" is null.");
+                return false;
+            }
+            BulkItemResponse.Failure failure = item.getFailure();
+            if (failure == null) {
+                mismatchDescription.appendText("bulk response item with index ").appendValue(i).appendText(" does not contain failure");
+                return false;
+            }
+            Exception exception = failure.getCause();
+            if (exception == null) {
+                mismatchDescription.appendText("bulk response item with index ").appendValue(i).appendText(" does not contain exception.");
+                return false;
+            }
+            if (exceptionMatcher.matches(exception) == false) {
+                mismatchDescription.appendText("bulk response item with index ")
+                    .appendValue(i)
+                    .appendText(" contains incorrect exception which is ")
+                    .appendValue(exception);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("bulk response should contain exceptions which indicate failure");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseMatchers.java
new file mode 100644
index 0000000000..eedcd3a3a0
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/BulkResponseMatchers.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.bulk.BulkResponse;
+
+public class BulkResponseMatchers {
+
+    private BulkResponseMatchers() {
+
+    }
+
+    public static Matcher<BulkResponse> successBulkResponse() {
+        return new SuccessBulkResponseMatcher();
+    }
+
+    public static Matcher<BulkResponse> failureBulkResponse() {
+        return new FailureBulkResponseMatcher();
+    }
+
+    public static Matcher<BulkResponse> bulkResponseContainExceptions(Matcher<Throwable> exceptionMatcher) {
+        return new BulkResponseContainExceptionsMatcher(exceptionMatcher);
+    }
+
+    public static Matcher<BulkResponse> bulkResponseContainExceptions(int index, Matcher<Throwable> exceptionMatcher) {
+        return new BulkResponseContainExceptionsAtIndexMatcher(index, exceptionMatcher);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainDocumentCountIndexMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainDocumentCountIndexMatcher.java
new file mode 100644
index 0000000000..91d2a28ef8
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainDocumentCountIndexMatcher.java
@@ -0,0 +1,43 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.get.GetIndexResponse;
+import org.opensearch.client.Client;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainDocumentCountIndexMatcher extends TypeSafeDiagnosingMatcher<LocalCluster> {
+
+    private final String indexName;
+    private final int expectedDocumentCount;
+
+    public ClusterContainDocumentCountIndexMatcher(String indexName, int expectedDocumentCount) {
+        this.indexName = requireNonNull(indexName, "Index name is required.");
+        this.expectedDocumentCount = expectedDocumentCount;
+    }
+
+    @Override
+    protected boolean matchesSafely(LocalCluster cluster, Description mismatchDescription) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            GetIndexResponse response = client.admin().indices().getIndex(null).actionGet();
+        }
+        return false;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("contains ").appendValue(expectedDocumentCount).appendText(" in index ").appendText(indexName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainSuccessSnapshotMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainSuccessSnapshotMatcher.java
new file mode 100644
index 0000000000..362663e07b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainSuccessSnapshotMatcher.java
@@ -0,0 +1,69 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.stream.Collectors;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
+import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse;
+import org.opensearch.client.Client;
+import org.opensearch.snapshots.SnapshotMissingException;
+import org.opensearch.snapshots.SnapshotState;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainSuccessSnapshotMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private final String repositoryName;
+    private final String snapshotName;
+
+    public ClusterContainSuccessSnapshotMatcher(String repositoryName, String snapshotName) {
+        this.repositoryName = requireNonNull(repositoryName, "Snapshot repository name is required.");
+        this.snapshotName = requireNonNull(snapshotName, "Snapshot name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            GetSnapshotsRequest request = new GetSnapshotsRequest(repositoryName, new String[] { snapshotName });
+            GetSnapshotsResponse response = client.admin().cluster().getSnapshots(request).actionGet();
+            long count = response.getSnapshots()
+                .stream()
+                .map(snapshot -> snapshot.state())
+                .filter(status -> SnapshotState.SUCCESS.equals(status))
+                .count();
+            if (count != 1) {
+                String snapshotStatuses = response.getSnapshots()
+                    .stream()
+                    .map(info -> String.format("%s %s", info.snapshotId().getName(), info.state()))
+                    .collect(Collectors.joining(", "));
+                mismatchDescription.appendText("snapshot is not present or has incorrect state, snapshots statuses ")
+                    .appendValue(snapshotStatuses);
+                return false;
+            }
+        } catch (SnapshotMissingException e) {
+            mismatchDescription.appendText(" snapshot does not exist");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Cluster contain snapshot ")
+            .appendValue(snapshotName)
+            .appendText(" in repository ")
+            .appendValue(repositoryName)
+            .appendText(" with success status");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateMatcher.java
new file mode 100644
index 0000000000..119e3a6a2f
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateMatcher.java
@@ -0,0 +1,43 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
+import org.opensearch.client.Client;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainTemplateMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private final String templateName;
+
+    public ClusterContainTemplateMatcher(String templateName) {
+        this.templateName = requireNonNull(templateName, "Index template name is required.");
+
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        var response = client.admin().indices().getTemplates(new GetIndexTemplatesRequest(templateName)).actionGet();
+        if (response.getIndexTemplates().isEmpty()) {
+            mismatchDescription.appendText("But template does not exists");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("template ").appendValue(templateName).appendText(" exists");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateWithAliasMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateWithAliasMatcher.java
new file mode 100644
index 0000000000..4addaa0dd5
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainTemplateWithAliasMatcher.java
@@ -0,0 +1,73 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
+import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.AliasMetadata;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainTemplateWithAliasMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private final String templateName;
+    private final String aliasName;
+
+    public ClusterContainTemplateWithAliasMatcher(String templateName, String aliasName) {
+        this.templateName = requireNonNull(templateName, "Index template name is required.");
+        this.aliasName = requireNonNull(aliasName, "Alias name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        var response = client.admin().indices().getTemplates(new GetIndexTemplatesRequest(templateName)).actionGet();
+        if (response.getIndexTemplates().isEmpty()) {
+            mismatchDescription.appendText("but template does not exists");
+            return false;
+        }
+        Set<String> aliases = getAliases(response);
+        if (aliases.contains(aliasName) == false) {
+            mismatchDescription.appendText("alias ")
+                .appendValue(aliasName)
+                .appendText(" is not present in template, other aliases in template ")
+                .appendValue(aliases.stream().collect(Collectors.joining(", ")));
+            return false;
+        }
+        return true;
+    }
+
+    private Set<String> getAliases(GetIndexTemplatesResponse response) {
+        return response.getIndexTemplates()
+            .stream()
+            .map(metadata -> metadata.getAliases())
+            .flatMap(aliasMap -> aliasNames(aliasMap))
+            .collect(Collectors.toSet());
+    }
+
+    private Stream<String> aliasNames(Map<String, AliasMetadata> aliasMap) {
+        Iterable<Map.Entry<String, AliasMetadata>> iterable = () -> aliasMap.entrySet().iterator();
+        return StreamSupport.stream(iterable.spliterator(), false).map(entry -> entry.getValue().getAlias());
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("template ").appendValue(templateName).appendText(" exists and ");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentMatcher.java
new file mode 100644
index 0000000000..3153214213
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentMatcher.java
@@ -0,0 +1,57 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.concurrent.ExecutionException;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetRequest;
+import org.opensearch.action.get.GetResponse;
+import org.opensearch.client.Client;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainsDocumentMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private static final Logger log = LogManager.getLogger(ClusterContainsDocumentMatcher.class);
+
+    private final String indexName;
+    private final String documentId;
+
+    ClusterContainsDocumentMatcher(String indexName, String documentId) {
+        this.indexName = requireNonNull(indexName, "Index name is required.");
+        this.documentId = requireNonNull(documentId, "Document id is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            GetResponse response = client.get(new GetRequest(indexName, documentId)).get();
+            if (response.isExists() == false) {
+                mismatchDescription.appendText("Document does not exists");
+                return false;
+            }
+        } catch (InterruptedException | ExecutionException e) {
+            log.error("Cannot verify if cluster contains document '{}' in index '{}'.", documentId, indexName, e);
+            mismatchDescription.appendText("Exception occured during verification if cluster contain document").appendValue(e);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Cluster contain document in index ").appendValue(indexName).appendText(" with id ").appendValue(documentId);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentWithFieldValueMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentWithFieldValueMatcher.java
new file mode 100644
index 0000000000..21f062fabf
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsDocumentWithFieldValueMatcher.java
@@ -0,0 +1,85 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetRequest;
+import org.opensearch.action.get.GetResponse;
+import org.opensearch.client.Client;
+
+import static java.util.Objects.requireNonNull;
+
+class ClusterContainsDocumentWithFieldValueMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private static final Logger log = LogManager.getLogger(ClusterContainsDocumentWithFieldValueMatcher.class);
+
+    private final String indexName;
+    private final String documentId;
+
+    private final String fieldName;
+
+    private final Object fieldValue;
+
+    ClusterContainsDocumentWithFieldValueMatcher(String indexName, String documentId, String fieldName, Object fieldValue) {
+        this.indexName = requireNonNull(indexName, "Index name is required.");
+        this.documentId = requireNonNull(documentId, "Document id is required.");
+        this.fieldName = requireNonNull(fieldName, "Field name is required.");
+        this.fieldValue = requireNonNull(fieldValue, "Field value is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            GetResponse response = client.get(new GetRequest(indexName, documentId)).get();
+            if (response.isExists() == false) {
+                mismatchDescription.appendText("Document does not exists");
+                return false;
+            }
+            Map<String, Object> source = response.getSource();
+            if (source == null) {
+                mismatchDescription.appendText("Cannot retrieve document source");
+                return false;
+            }
+            if (source.containsKey(fieldName) == false) {
+                mismatchDescription.appendText("document does not contain field ").appendValue(fieldName);
+                return false;
+            }
+            Object actualFieldValue = source.get(fieldName);
+            if (fieldValue.equals(actualFieldValue) == false) {
+                mismatchDescription.appendText(" document contain incorrect field value ").appendValue(actualFieldValue);
+                return false;
+            }
+        } catch (InterruptedException | ExecutionException e) {
+            log.error("Cannot verify if cluster contains document '{}' in index '{}'.", documentId, indexName, e);
+            mismatchDescription.appendText("Exception occured during verification if cluster contain document").appendValue(e);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Cluster contain document in index ")
+            .appendValue(indexName)
+            .appendText(" with id ")
+            .appendValue(documentId)
+            .appendText(" with field ")
+            .appendValue(fieldName)
+            .appendText(" which is equal to ")
+            .appendValue(fieldValue);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsSnapshotRepositoryMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsSnapshotRepositoryMatcher.java
new file mode 100644
index 0000000000..fcff8bd6cf
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterContainsSnapshotRepositoryMatcher.java
@@ -0,0 +1,67 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
+import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.ClusterAdminClient;
+import org.opensearch.repositories.RepositoryMissingException;
+
+import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.joining;
+
+class ClusterContainsSnapshotRepositoryMatcher extends TypeSafeDiagnosingMatcher<Client> {
+
+    private final String repositoryName;
+
+    public ClusterContainsSnapshotRepositoryMatcher(String repositoryName) {
+        this.repositoryName = requireNonNull(repositoryName, "Repository name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            ClusterAdminClient adminClient = client.admin().cluster();
+            GetRepositoriesRequest request = new GetRepositoriesRequest(new String[] { "*" });
+            GetRepositoriesResponse response = adminClient.getRepositories(request).actionGet();
+            if (response == null) {
+                mismatchDescription.appendText("Cannot check if cluster contain repository");
+                return false;
+            }
+            Set<String> actualRepositoryNames = response.repositories()
+                .stream()
+                .map(metadata -> metadata.name())
+                .collect(Collectors.toSet());
+            if (actualRepositoryNames.contains(repositoryName) == false) {
+                mismatchDescription.appendText("Cluster does not contain snapshot repository ")
+                    .appendValue(repositoryName)
+                    .appendText(", but the following repositories are defined in the cluster ")
+                    .appendValue(actualRepositoryNames.stream().collect(joining(", ")));
+                return false;
+            }
+        } catch (RepositoryMissingException e) {
+            mismatchDescription.appendText(" cluster does not contain any repository.");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Cluster contain snapshot repository with name ").appendValue(repositoryName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterMatchers.java
new file mode 100644
index 0000000000..3829436d74
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ClusterMatchers.java
@@ -0,0 +1,79 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+public class ClusterMatchers {
+
+    private ClusterMatchers() {
+
+    }
+
+    public static Matcher<Client> clusterContainsDocument(String indexName, String documentId) {
+        return new ClusterContainsDocumentMatcher(indexName, documentId);
+    }
+
+    public static Matcher<Client> clusterContainsDocumentWithFieldValue(
+        String indexName,
+        String documentId,
+        String fieldName,
+        Object fieldValue
+    ) {
+        return new ClusterContainsDocumentWithFieldValueMatcher(indexName, documentId, fieldName, fieldValue);
+    }
+
+    public static Matcher<Client> clusterContainTemplate(String templateName) {
+        return new ClusterContainTemplateMatcher(templateName);
+    }
+
+    public static Matcher<Client> clusterContainTemplateWithAlias(String templateName, String aliasName) {
+        return new ClusterContainTemplateWithAliasMatcher(templateName, aliasName);
+    }
+
+    public static Matcher<Client> clusterContainsSnapshotRepository(String repositoryName) {
+        return new ClusterContainsSnapshotRepositoryMatcher(repositoryName);
+    }
+
+    public static Matcher<Client> clusterContainSuccessSnapshot(String repositoryName, String snapshotName) {
+        return new ClusterContainSuccessSnapshotMatcher(repositoryName, snapshotName);
+    }
+
+    public static Matcher<Client> snapshotInClusterDoesNotExists(String repositoryName, String snapshotName) {
+        return new SnapshotInClusterDoesNotExist(repositoryName, snapshotName);
+    }
+
+    public static Matcher<Client> aliasExists(String aliasName) {
+        return new AliasExistsMatcher(aliasName);
+    }
+
+    public static Matcher<LocalCluster> indexExists(String expectedIndexName) {
+        return new IndexExistsMatcher(expectedIndexName);
+    }
+
+    public static Matcher<LocalCluster> indexStateIsEqualTo(String expectedIndexName, IndexMetadata.State expectedState) {
+        return new IndexStateIsEqualToMatcher(expectedIndexName, expectedState);
+    }
+
+    public static Matcher<LocalCluster> indexSettingsContainValues(String expectedIndexName, Settings expectedSettings) {
+        return new IndexSettingsContainValuesMatcher(expectedIndexName, expectedSettings);
+    }
+
+    public static Matcher<LocalCluster> indexMappingIsEqualTo(String expectedIndexName, Map<String, ?> expectedMapping) {
+        return new IndexMappingIsEqualToMatcher(expectedIndexName, expectedMapping);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainNotEmptyScrollingIdMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainNotEmptyScrollingIdMatcher.java
new file mode 100644
index 0000000000..03b9b6bab8
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainNotEmptyScrollingIdMatcher.java
@@ -0,0 +1,34 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.apache.commons.lang3.StringUtils;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+
+class ContainNotEmptyScrollingIdMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        String scrollId = searchResponse.getScrollId();
+        if (StringUtils.isEmpty(scrollId)) {
+            mismatchDescription.appendText("scrolling id is null or empty");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Search response should contain scrolling id.");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsAggregationWithNameAndTypeMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsAggregationWithNameAndTypeMatcher.java
new file mode 100644
index 0000000000..b1ef21c922
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsAggregationWithNameAndTypeMatcher.java
@@ -0,0 +1,57 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.aggregations.Aggregation;
+import org.opensearch.search.aggregations.Aggregations;
+
+import static java.util.Objects.requireNonNull;
+
+class ContainsAggregationWithNameAndTypeMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final String expectedAggregationName;
+    private final String expectedAggregationType;
+
+    public ContainsAggregationWithNameAndTypeMatcher(String expectedAggregationName, String expectedAggregationType) {
+        this.expectedAggregationName = requireNonNull(expectedAggregationName, "Aggregation name is required");
+        this.expectedAggregationType = requireNonNull(expectedAggregationType, "Expected aggregation type is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse response, Description mismatchDescription) {
+        Aggregations aggregations = response.getAggregations();
+        if (aggregations == null) {
+            mismatchDescription.appendText("search response does not contain aggregations");
+            return false;
+        }
+        Aggregation aggregation = aggregations.get(expectedAggregationName);
+        if (aggregation == null) {
+            mismatchDescription.appendText("Response does not contain aggregation with name ").appendValue(expectedAggregationName);
+            return false;
+        }
+        if (expectedAggregationType.equals(aggregation.getType()) == false) {
+            mismatchDescription.appendText("Aggregation contain incorrect type which is ").appendValue(aggregation.getType());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Search response should contains aggregation results with name ")
+            .appendValue(expectedAggregationName)
+            .appendText(" and type ")
+            .appendValue(expectedAggregationType);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsExactlyIndicesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsExactlyIndicesMatcher.java
new file mode 100644
index 0000000000..9b597ad1c8
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsExactlyIndicesMatcher.java
@@ -0,0 +1,46 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Set;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+
+import static java.util.Objects.isNull;
+
+class ContainsExactlyIndicesMatcher extends TypeSafeDiagnosingMatcher<FieldCapabilitiesResponse> {
+
+    private final Set<String> expectedIndices;
+
+    ContainsExactlyIndicesMatcher(String... expectedIndices) {
+        if (isNull(expectedIndices) || expectedIndices.length == 0) {
+            throw new IllegalArgumentException("expectedIndices cannot be null or empty");
+        }
+        this.expectedIndices = Set.of(expectedIndices);
+    }
+
+    @Override
+    protected boolean matchesSafely(FieldCapabilitiesResponse response, Description mismatchDescription) {
+        Set<String> actualIndices = Set.of(response.getIndices());
+        if (!expectedIndices.equals(actualIndices)) {
+            mismatchDescription.appendText("Actual indices: ").appendValue(actualIndices);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response contains indices: ").appendValue(expectedIndices);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsFieldWithTypeMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsFieldWithTypeMatcher.java
new file mode 100644
index 0000000000..cd6b4b05cc
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ContainsFieldWithTypeMatcher.java
@@ -0,0 +1,55 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.fieldcaps.FieldCapabilities;
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class ContainsFieldWithTypeMatcher extends TypeSafeDiagnosingMatcher<FieldCapabilitiesResponse> {
+
+    private final String expectedFieldName;
+    private final String expectedFieldType;
+
+    ContainsFieldWithTypeMatcher(String expectedFieldName, String expectedFieldType) {
+        this.expectedFieldName = requireNonNull(expectedFieldName, "Field name is required");
+        ;
+        this.expectedFieldType = requireNonNull(expectedFieldType, "Field type is required");
+        ;
+    }
+
+    @Override
+    protected boolean matchesSafely(FieldCapabilitiesResponse response, Description mismatchDescription) {
+        Map<String, Map<String, FieldCapabilities>> fieldCapabilitiesMap = response.get();
+        if (!fieldCapabilitiesMap.containsKey(expectedFieldName)) {
+            mismatchDescription.appendText("Response does not contain field with name ").appendText(expectedFieldName);
+            return false;
+        }
+        if (!fieldCapabilitiesMap.get(expectedFieldName).containsKey(expectedFieldType)) {
+            mismatchDescription.appendText("Field type does not match ").appendText(expectedFieldType);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response contains field with name ")
+            .appendValue(expectedFieldName)
+            .appendText(" and type ")
+            .appendValue(expectedFieldType);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/DeletePitContainsExactlyIdsResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/DeletePitContainsExactlyIdsResponseMatcher.java
new file mode 100644
index 0000000000..788d023447
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/DeletePitContainsExactlyIdsResponseMatcher.java
@@ -0,0 +1,48 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.DeletePitInfo;
+import org.opensearch.action.search.DeletePitResponse;
+
+import static java.util.Objects.isNull;
+
+class DeletePitContainsExactlyIdsResponseMatcher extends TypeSafeDiagnosingMatcher<DeletePitResponse> {
+
+    private final Set<String> expectedPitIds;
+
+    DeletePitContainsExactlyIdsResponseMatcher(String[] expectedPitIds) {
+        if (isNull(expectedPitIds) || 0 == expectedPitIds.length) {
+            throw new IllegalArgumentException("expectedPitIds cannot be null or empty");
+        }
+        this.expectedPitIds = Set.of(expectedPitIds);
+    }
+
+    @Override
+    protected boolean matchesSafely(DeletePitResponse response, Description mismatchDescription) {
+        Set<String> actualPitIds = response.getDeletePitResults().stream().map(DeletePitInfo::getPitId).collect(Collectors.toSet());
+        if (!actualPitIds.equals(expectedPitIds)) {
+            mismatchDescription.appendText("Actual pit ids: ").appendValue(actualPitIds);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Should contain exactly pit with ids: ").appendValue(expectedPitIds);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/DeleteResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/DeleteResponseMatchers.java
new file mode 100644
index 0000000000..4112d0bab8
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/DeleteResponseMatchers.java
@@ -0,0 +1,23 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.delete.DeleteResponse;
+
+public class DeleteResponseMatchers {
+
+    private DeleteResponseMatchers() {}
+
+    public static Matcher<DeleteResponse> isSuccessfulDeleteResponse() {
+        return new SuccessfulDeleteResponseMatcher();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ExactNumberOfAuditsFulfillPredicateMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExactNumberOfAuditsFulfillPredicateMatcher.java
new file mode 100644
index 0000000000..9bbe966e07
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExactNumberOfAuditsFulfillPredicateMatcher.java
@@ -0,0 +1,45 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.hamcrest.Description;
+
+import org.opensearch.security.auditlog.impl.AuditMessage;
+
+class ExactNumberOfAuditsFulfillPredicateMatcher extends AuditsFulfillPredicateMatcher {
+
+    private final long exactNumberOfAuditMessages;
+
+    public ExactNumberOfAuditsFulfillPredicateMatcher(long exactNumberOfAuditMessages, Predicate<AuditMessage> predicate) {
+        super(predicate);
+        this.exactNumberOfAuditMessages = exactNumberOfAuditMessages;
+    }
+
+    @Override
+    protected boolean matchesSafely(List<AuditMessage> audits, Description mismatchDescription) {
+        long count = countAuditsWhichMatchPredicate(audits);
+        if (exactNumberOfAuditMessages != count) {
+            mismatchDescription.appendText(" only ")
+                .appendValue(count)
+                .appendText(" match predicate. Examined audit logs ")
+                .appendText(auditMessagesToString(audits));
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendValue(exactNumberOfAuditMessages).appendText(" audit records should match predicate ").appendValue(predicate);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionErrorMessageMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionErrorMessageMatcher.java
new file mode 100644
index 0000000000..2e07bd0ec5
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionErrorMessageMatcher.java
@@ -0,0 +1,43 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import static java.util.Objects.requireNonNull;
+
+class ExceptionErrorMessageMatcher extends TypeSafeDiagnosingMatcher<Throwable> {
+
+    private final Matcher<String> errorMessageMatcher;
+
+    public ExceptionErrorMessageMatcher(Matcher<String> errorMessageMatcher) {
+        this.errorMessageMatcher = requireNonNull(errorMessageMatcher, "Error message matcher is required");
+    }
+
+    @Override
+    protected boolean matchesSafely(Throwable ex, Description mismatchDescription) {
+        boolean matches = errorMessageMatcher.matches(ex.getMessage());
+        if (matches == false) {
+            mismatchDescription.appendText("Exception of class ")
+                .appendValue(ex.getClass().getCanonicalName())
+                .appendText("contains unexpected error message which is ")
+                .appendValue(ex.getMessage());
+        }
+        return matches;
+
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Error message in exception matches").appendValue(errorMessageMatcher);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionHasCauseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionHasCauseMatcher.java
new file mode 100644
index 0000000000..ed1b203898
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionHasCauseMatcher.java
@@ -0,0 +1,43 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Objects;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+class ExceptionHasCauseMatcher extends TypeSafeDiagnosingMatcher<Throwable> {
+
+    private final Class<? extends Throwable> expectedCauseType;
+
+    public ExceptionHasCauseMatcher(Class<? extends Throwable> expectedCauseType) {
+        this.expectedCauseType = Objects.requireNonNull(expectedCauseType, "Exception cause type is required");
+    }
+
+    @Override
+    protected boolean matchesSafely(Throwable throwable, Description mismatchDescription) {
+        Throwable cause = throwable.getCause();
+        if (cause == null) {
+            mismatchDescription.appendText("exception cause is null");
+            return false;
+        }
+        if (expectedCauseType.isInstance(cause) == false) {
+            mismatchDescription.appendText(" cause is instance of ").appendValue(cause.getClass());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Exception cause is instance of ").appendValue(expectedCauseType);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionMatcherAssert.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionMatcherAssert.java
new file mode 100644
index 0000000000..671f22b8e3
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/ExceptionMatcherAssert.java
@@ -0,0 +1,40 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import static java.util.Objects.requireNonNull;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class ExceptionMatcherAssert {
+
+    @FunctionalInterface
+    public interface ThrowingCallable {
+        void call() throws Exception;
+    }
+
+    public static void assertThatThrownBy(ThrowingCallable throwingCallable, Matcher<? super Throwable> matcher) {
+        Throwable expectedException = catchThrowable(throwingCallable);
+        assertThat("Expected exception was not thrown", expectedException, notNullValue());
+        assertThat(expectedException, matcher);
+    }
+
+    public static Throwable catchThrowable(ThrowingCallable throwingCallable) {
+        Throwable expectedException = null;
+        try {
+            requireNonNull(throwingCallable, "ThrowingCallable must not be null.").call();
+        } catch (Throwable e) {
+            expectedException = e;
+        }
+        return expectedException;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/FailureBulkResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/FailureBulkResponseMatcher.java
new file mode 100644
index 0000000000..3d912e0283
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/FailureBulkResponseMatcher.java
@@ -0,0 +1,32 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.bulk.BulkResponse;
+
+class FailureBulkResponseMatcher extends TypeSafeDiagnosingMatcher<BulkResponse> {
+
+    @Override
+    protected boolean matchesSafely(BulkResponse response, Description mismatchDescription) {
+        if (response.hasFailures() == false) {
+            mismatchDescription.appendText(" bulk operation was executed correctly what is not expected.");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("bulk operation failure");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/FieldCapabilitiesResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/FieldCapabilitiesResponseMatchers.java
new file mode 100644
index 0000000000..2a78c7b71c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/FieldCapabilitiesResponseMatchers.java
@@ -0,0 +1,32 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+
+public class FieldCapabilitiesResponseMatchers {
+
+    private FieldCapabilitiesResponseMatchers() {}
+
+    public static Matcher<FieldCapabilitiesResponse> containsExactlyIndices(String... expectedIndices) {
+        return new ContainsExactlyIndicesMatcher(expectedIndices);
+    }
+
+    public static Matcher<FieldCapabilitiesResponse> containsFieldWithNameAndType(String expectedFieldName, String expectedFieldType) {
+        return new ContainsFieldWithTypeMatcher(expectedFieldName, expectedFieldType);
+    }
+
+    public static Matcher<FieldCapabilitiesResponse> numberOfFieldsIsEqualTo(int expectedNumberOfFields) {
+        return new NumberOfFieldsIsEqualToMatcher(expectedNumberOfFields);
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetAllPitsContainsExactlyIdsResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetAllPitsContainsExactlyIdsResponseMatcher.java
new file mode 100644
index 0000000000..c568bfa9d7
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetAllPitsContainsExactlyIdsResponseMatcher.java
@@ -0,0 +1,48 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.GetAllPitNodesResponse;
+import org.opensearch.action.search.ListPitInfo;
+
+import static java.util.Objects.isNull;
+
+class GetAllPitsContainsExactlyIdsResponseMatcher extends TypeSafeDiagnosingMatcher<GetAllPitNodesResponse> {
+
+    private final Set<String> expectedPitIds;
+
+    GetAllPitsContainsExactlyIdsResponseMatcher(String[] expectedPitIds) {
+        if (isNull(expectedPitIds) || 0 == expectedPitIds.length) {
+            throw new IllegalArgumentException("expectedPitIds cannot be null or empty");
+        }
+        this.expectedPitIds = Set.of(expectedPitIds);
+    }
+
+    @Override
+    protected boolean matchesSafely(GetAllPitNodesResponse response, Description mismatchDescription) {
+        Set<String> actualPitIds = response.getPitInfos().stream().map(ListPitInfo::getPitId).collect(Collectors.toSet());
+        if (!actualPitIds.equals(expectedPitIds)) {
+            mismatchDescription.appendText("Actual pit ids: ").appendValue(actualPitIds);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Should contain exactly pit with ids: ").appendValue(expectedPitIds);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetIndexResponseContainsIndicesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetIndexResponseContainsIndicesMatcher.java
new file mode 100644
index 0000000000..20f02b1319
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetIndexResponseContainsIndicesMatcher.java
@@ -0,0 +1,49 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.client.indices.GetIndexResponse;
+
+import static java.util.Objects.isNull;
+
+class GetIndexResponseContainsIndicesMatcher extends TypeSafeDiagnosingMatcher<GetIndexResponse> {
+
+    private final String[] expectedIndices;
+
+    GetIndexResponseContainsIndicesMatcher(String[] expectedIndices) {
+        if (isNull(expectedIndices) || 0 == expectedIndices.length) {
+            throw new IllegalArgumentException("expectedIndices cannot be null or empty");
+        }
+        this.expectedIndices = expectedIndices;
+    }
+
+    @Override
+    protected boolean matchesSafely(GetIndexResponse response, Description mismatchDescription) {
+        List<String> actual = Arrays.asList(response.getIndices());
+        for (String index : expectedIndices) {
+            if (!actual.contains(index)) {
+                mismatchDescription.appendText("Actual indices: ").appendValue(response.getIndices());
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response should contain indices: ").appendValue(expectedIndices);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetMappingsResponseContainsIndicesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetMappingsResponseContainsIndicesMatcher.java
new file mode 100644
index 0000000000..8e2b3da097
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetMappingsResponseContainsIndicesMatcher.java
@@ -0,0 +1,49 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.client.indices.GetMappingsResponse;
+import org.opensearch.cluster.metadata.MappingMetadata;
+
+import static java.util.Objects.isNull;
+
+class GetMappingsResponseContainsIndicesMatcher extends TypeSafeDiagnosingMatcher<GetMappingsResponse> {
+
+    private final String[] expectedIndices;
+
+    GetMappingsResponseContainsIndicesMatcher(String[] expectedIndices) {
+        if (isNull(expectedIndices) || 0 == expectedIndices.length) {
+            throw new IllegalArgumentException("expectedIndices cannot be null or empty");
+        }
+        this.expectedIndices = expectedIndices;
+    }
+
+    @Override
+    protected boolean matchesSafely(GetMappingsResponse response, Description mismatchDescription) {
+        Map<String, MappingMetadata> indicesMappings = response.mappings();
+        for (String index : expectedIndices) {
+            if (!indicesMappings.containsKey(index)) {
+                mismatchDescription.appendText("Response contains mappings of indices: ").appendValue(indicesMappings.keySet());
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response should contain mappings of indices: ").appendValue(expectedIndices);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainOnlyDocumentIdMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainOnlyDocumentIdMatcher.java
new file mode 100644
index 0000000000..e6d7f02c6e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainOnlyDocumentIdMatcher.java
@@ -0,0 +1,54 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class GetResponseContainOnlyDocumentIdMatcher extends TypeSafeDiagnosingMatcher<GetResponse> {
+
+    private final String indexName;
+    private final String documentId;
+
+    public GetResponseContainOnlyDocumentIdMatcher(String indexName, String documentId) {
+        this.indexName = requireNonNull(indexName, "Index name is required");
+        this.documentId = requireNonNull(documentId, "Document id is required");
+    }
+
+    @Override
+    protected boolean matchesSafely(GetResponse response, Description mismatchDescription) {
+        if (indexName.equals(response.getIndex()) == false) {
+            mismatchDescription.appendText(" index name ").appendValue(response.getIndex()).appendText(" is incorrect ");
+            return false;
+        }
+        if (documentId.equals(response.getId()) == false) {
+            mismatchDescription.appendText(" id ").appendValue(response.getId()).appendText(" is incorrect ");
+            return false;
+        }
+        if (response.isExists()) {
+            mismatchDescription.appendText(" document exist what is not desired ");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response should contain document id from index ")
+            .appendValue(indexName)
+            .appendText(" with id ")
+            .appendValue(documentId)
+            .appendText(" but document should not be present ");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainsDocumentWithIdMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainsDocumentWithIdMatcher.java
new file mode 100644
index 0000000000..aa9d702243
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseContainsDocumentWithIdMatcher.java
@@ -0,0 +1,57 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class GetResponseContainsDocumentWithIdMatcher extends TypeSafeDiagnosingMatcher<GetResponse> {
+
+    private final String indexName;
+    private final String documentId;
+
+    public GetResponseContainsDocumentWithIdMatcher(String indexName, String documentId) {
+        this.indexName = requireNonNull(indexName, "Index name is required");
+        this.documentId = requireNonNull(documentId, "Document id is required");
+    }
+
+    @Override
+    protected boolean matchesSafely(GetResponse response, Description mismatchDescription) {
+        if (indexName.equals(response.getIndex()) == false) {
+            mismatchDescription.appendText("Document should not belong to index ").appendValue(response.getIndex());
+            return false;
+        }
+        if (documentId.equals(response.getId()) == false) {
+            mismatchDescription.appendText("Document contain incorrect id which is ").appendValue(response.getId());
+            return false;
+        }
+        if (response.isExists() == false) {
+            mismatchDescription.appendText("Document does not exist or is inaccessible");
+            return false;
+        }
+        if (response.isSourceEmpty()) {
+            mismatchDescription.appendText("Document source is empty");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response should contain document from index ")
+            .appendValue(indexName)
+            .appendText(" with id ")
+            .appendValue(documentId);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentContainsExactlyFieldsWithNamesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentContainsExactlyFieldsWithNamesMatcher.java
new file mode 100644
index 0000000000..66bdc0a9b7
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentContainsExactlyFieldsWithNamesMatcher.java
@@ -0,0 +1,51 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetResponse;
+
+import static java.util.Objects.isNull;
+
+class GetResponseDocumentContainsExactlyFieldsWithNamesMatcher extends TypeSafeDiagnosingMatcher<GetResponse> {
+
+    private final Set<String> expectedFieldsNames;
+
+    GetResponseDocumentContainsExactlyFieldsWithNamesMatcher(String... expectedFieldsNames) {
+        if (isNull(expectedFieldsNames) || expectedFieldsNames.length == 0) {
+            throw new IllegalArgumentException("expectedFieldsNames cannot be null or empty");
+        }
+        this.expectedFieldsNames = Set.of(expectedFieldsNames);
+    }
+
+    @Override
+    protected boolean matchesSafely(GetResponse response, Description mismatchDescription) {
+        Map<String, Object> sourceMap = response.getSourceAsMap();
+        Set<String> actualFieldsNames = sourceMap.keySet();
+        if (!expectedFieldsNames.equals(actualFieldsNames)) {
+            mismatchDescription.appendValue("Document with id ")
+                .appendValue(response.getId())
+                .appendText(" contains fields with names: ")
+                .appendValue(actualFieldsNames);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Document contain exactly fields with names: ").appendValue(expectedFieldsNames);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentDoesNotContainFieldMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentDoesNotContainFieldMatcher.java
new file mode 100644
index 0000000000..508e5b8b61
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentDoesNotContainFieldMatcher.java
@@ -0,0 +1,47 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class GetResponseDocumentDoesNotContainFieldMatcher extends TypeSafeDiagnosingMatcher<GetResponse> {
+
+    private final String fieldName;
+
+    public GetResponseDocumentDoesNotContainFieldMatcher(String fieldName) {
+        this.fieldName = requireNonNull(fieldName, "Field name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(GetResponse response, Description mismatchDescription) {
+        Map<String, Object> source = response.getSource();
+        if (source == null) {
+            mismatchDescription.appendText("Source is not available in search results");
+            return false;
+        }
+        if (source.containsKey(fieldName)) {
+            mismatchDescription.appendText("Document contains field ").appendValue(fieldName);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Document does not contain field ").appendValue(fieldName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentFieldValueMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentFieldValueMatcher.java
new file mode 100644
index 0000000000..78bcae5494
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseDocumentFieldValueMatcher.java
@@ -0,0 +1,57 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.GetResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class GetResponseDocumentFieldValueMatcher extends TypeSafeDiagnosingMatcher<GetResponse> {
+
+    private final String fieldName;
+    private final Object fieldValue;
+
+    public GetResponseDocumentFieldValueMatcher(String fieldName, Object fieldValue) {
+        this.fieldName = requireNonNull(fieldName, "Field name is required.");
+        this.fieldValue = requireNonNull(fieldValue, "Field value is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(GetResponse response, Description mismatchDescription) {
+        Map<String, Object> source = response.getSource();
+        if (source == null) {
+            mismatchDescription.appendText("Source is not available in search results");
+            return false;
+        }
+        if (source.containsKey(fieldName) == false) {
+            mismatchDescription.appendText("Document does not contain field ").appendValue(fieldName);
+            return false;
+        }
+        Object actualFieldValue = source.get(fieldName);
+        if (fieldValue.equals(actualFieldValue) == false) {
+            mismatchDescription.appendText("Field ")
+                .appendValue(fieldName)
+                .appendText(" has incorrect value ")
+                .appendValue(actualFieldValue);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Document contain field ").appendValue(fieldName).appendText(" with value ").appendValue(fieldValue);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseMatchers.java
new file mode 100644
index 0000000000..89c183fc34
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetResponseMatchers.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.get.GetResponse;
+
+public class GetResponseMatchers {
+
+    private GetResponseMatchers() {}
+
+    public static Matcher<GetResponse> containDocument(String indexName, String documentId) {
+        return new GetResponseContainsDocumentWithIdMatcher(indexName, documentId);
+    }
+
+    public static Matcher<GetResponse> containOnlyDocumentId(String indexName, String documentId) {
+        return new GetResponseContainOnlyDocumentIdMatcher(indexName, documentId);
+    }
+
+    public static Matcher<GetResponse> documentContainField(String fieldName, Object fieldValue) {
+        return new GetResponseDocumentFieldValueMatcher(fieldName, fieldValue);
+    }
+
+    public static Matcher<GetResponse> documentDoesNotContainField(String fieldName) {
+        return new GetResponseDocumentDoesNotContainFieldMatcher(fieldName);
+    }
+
+    public static Matcher<GetResponse> documentContainsExactlyFieldsWithNames(String... expectedFieldsNames) {
+        return new GetResponseDocumentContainsExactlyFieldsWithNamesMatcher(expectedFieldsNames);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/GetSettingsResponseContainsIndicesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetSettingsResponseContainsIndicesMatcher.java
new file mode 100644
index 0000000000..c96deef001
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/GetSettingsResponseContainsIndicesMatcher.java
@@ -0,0 +1,50 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.opensearch.common.settings.Settings;
+
+import static java.util.Objects.isNull;
+
+class GetSettingsResponseContainsIndicesMatcher extends TypeSafeDiagnosingMatcher<GetSettingsResponse> {
+
+    private final String[] expectedIndices;
+
+    GetSettingsResponseContainsIndicesMatcher(String[] expectedIndices) {
+        if (isNull(expectedIndices) || 0 == expectedIndices.length) {
+            throw new IllegalArgumentException("expectedIndices cannot be null or empty");
+        }
+        this.expectedIndices = expectedIndices;
+    }
+
+    @Override
+    protected boolean matchesSafely(GetSettingsResponse response, Description mismatchDescription) {
+
+        final Map<String, Settings> indexToSettings = response.getIndexToSettings();
+        for (String index : expectedIndices) {
+            if (!indexToSettings.containsKey(index)) {
+                mismatchDescription.appendText("Response contains settings of indices: ").appendValue(indexToSettings.keySet());
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response should contain settings of indices: ").appendValue(expectedIndices);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexExistsMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexExistsMatcher.java
new file mode 100644
index 0000000000..aab3d426d2
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexExistsMatcher.java
@@ -0,0 +1,49 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.exists.indices.IndicesExistsRequest;
+import org.opensearch.action.admin.indices.exists.indices.IndicesExistsResponse;
+import org.opensearch.client.Client;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static java.util.Objects.requireNonNull;
+
+class IndexExistsMatcher extends TypeSafeDiagnosingMatcher<LocalCluster> {
+
+    private final String expectedIndexName;
+
+    IndexExistsMatcher(String expectedIndexName) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+    }
+
+    @Override
+    protected boolean matchesSafely(LocalCluster cluster, Description mismatchDescription) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            IndicesExistsResponse indicesExistsResponse = client.admin()
+                .indices()
+                .exists(new IndicesExistsRequest(expectedIndexName))
+                .actionGet();
+            if (!indicesExistsResponse.isExists()) {
+                mismatchDescription.appendText("Index ").appendValue(expectedIndexName).appendValue(" does not exist");
+                return false;
+            }
+            return true;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Index ").appendValue(expectedIndexName).appendText(" exists");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexMappingIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexMappingIsEqualToMatcher.java
new file mode 100644
index 0000000000..ead0e9d1d7
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexMappingIsEqualToMatcher.java
@@ -0,0 +1,67 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest;
+import org.opensearch.action.admin.indices.mapping.get.GetMappingsResponse;
+import org.opensearch.client.Client;
+import org.opensearch.index.IndexNotFoundException;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static java.util.Objects.isNull;
+import static java.util.Objects.requireNonNull;
+
+class IndexMappingIsEqualToMatcher extends TypeSafeDiagnosingMatcher<LocalCluster> {
+
+    private final String expectedIndexName;
+    private final Map<String, ?> expectedMapping;
+
+    IndexMappingIsEqualToMatcher(String expectedIndexName, Map<String, ?> expectedMapping) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+        if (isNull(expectedMapping) || expectedMapping.isEmpty()) {
+            throw new IllegalArgumentException("expectedMapping cannot be null or empty");
+        }
+        this.expectedMapping = expectedMapping;
+    }
+
+    @Override
+    protected boolean matchesSafely(LocalCluster cluster, Description mismatchDescription) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            GetMappingsResponse response = client.admin()
+                .indices()
+                .getMappings(new GetMappingsRequest().indices(expectedIndexName))
+                .actionGet();
+
+            Map<String, Object> actualIndexMapping = response.getMappings().get(expectedIndexName).sourceAsMap();
+
+            if (!expectedMapping.equals(actualIndexMapping)) {
+                mismatchDescription.appendText("Actual mapping ").appendValue(actualIndexMapping).appendText(" does not match expected");
+                return false;
+            }
+            return true;
+        } catch (IndexNotFoundException e) {
+            mismatchDescription.appendText("Index: ").appendValue(expectedIndexName).appendText(" does not exist");
+            return false;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Index ")
+            .appendValue(expectedIndexName)
+            .appendText(". Mapping should be equal to: ")
+            .appendValue(expectedMapping);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexResponseMatchers.java
new file mode 100644
index 0000000000..88f3ac099d
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexResponseMatchers.java
@@ -0,0 +1,57 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
+import org.opensearch.action.admin.indices.open.OpenIndexResponse;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.opensearch.client.indices.CloseIndexResponse;
+import org.opensearch.client.indices.CreateIndexResponse;
+import org.opensearch.client.indices.GetIndexResponse;
+import org.opensearch.client.indices.GetMappingsResponse;
+import org.opensearch.client.indices.ResizeResponse;
+
+public class IndexResponseMatchers {
+
+    public static Matcher<CreateIndexResponse> isSuccessfulCreateIndexResponse(String expectedIndexName) {
+        return new SuccessfulCreateIndexResponseMatcher(expectedIndexName);
+    }
+
+    public static Matcher<GetIndexResponse> getIndexResponseContainsIndices(String... expectedIndices) {
+        return new GetIndexResponseContainsIndicesMatcher(expectedIndices);
+    }
+
+    public static Matcher<CloseIndexResponse> isSuccessfulCloseIndexResponse() {
+        return new SuccessfulCloseIndexResponseMatcher();
+    }
+
+    public static Matcher<OpenIndexResponse> isSuccessfulOpenIndexResponse() {
+        return new SuccessfulOpenIndexResponseMatcher();
+    }
+
+    public static Matcher<ResizeResponse> isSuccessfulResizeResponse(String expectedIndexName) {
+        return new SuccessfulResizeResponseMatcher(expectedIndexName);
+    }
+
+    public static Matcher<GetSettingsResponse> getSettingsResponseContainsIndices(String... expectedIndices) {
+        return new GetSettingsResponseContainsIndicesMatcher(expectedIndices);
+    }
+
+    public static Matcher<ClearIndicesCacheResponse> isSuccessfulClearIndicesCacheResponse() {
+        return new SuccessfulClearIndicesCacheResponseMatcher();
+    }
+
+    public static Matcher<GetMappingsResponse> getMappingsResponseContainsIndices(String... expectedIndices) {
+        return new GetMappingsResponseContainsIndicesMatcher(expectedIndices);
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexSettingsContainValuesMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexSettingsContainValuesMatcher.java
new file mode 100644
index 0000000000..6bb89c6cae
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexSettingsContainValuesMatcher.java
@@ -0,0 +1,75 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
+import org.opensearch.client.Client;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.IndexNotFoundException;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static java.util.Objects.isNull;
+import static java.util.Objects.requireNonNull;
+
+class IndexSettingsContainValuesMatcher extends TypeSafeDiagnosingMatcher<LocalCluster> {
+
+    private final String expectedIndexName;
+    private final Settings expectedSettings;
+
+    IndexSettingsContainValuesMatcher(String expectedIndexName, Settings expectedSettings) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+        if (isNull(expectedSettings) || expectedSettings.isEmpty()) {
+            throw new IllegalArgumentException("expectedSettings cannot be null or empty");
+        }
+        this.expectedSettings = expectedSettings;
+    }
+
+    @Override
+    protected boolean matchesSafely(LocalCluster cluster, Description mismatchDescription) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            GetSettingsResponse response = client.admin()
+                .indices()
+                .getSettings(new GetSettingsRequest().indices(expectedIndexName))
+                .actionGet();
+
+            Settings actualSettings = response.getIndexToSettings().get(expectedIndexName);
+
+            for (String setting : expectedSettings.keySet()) {
+                if (isNull(actualSettings.get(setting))) {
+                    mismatchDescription.appendValue("Value of ").appendValue(setting).appendText(" property is missing");
+                    return false;
+                }
+                if (!expectedSettings.get(setting).equals(actualSettings.get(setting))) {
+                    mismatchDescription.appendText("Actual value of `")
+                        .appendValue(setting)
+                        .appendText("` property: ")
+                        .appendValue(actualSettings.get(setting));
+                    return false;
+                }
+            }
+            return true;
+        } catch (IndexNotFoundException e) {
+            mismatchDescription.appendText("Index: ").appendValue(expectedIndexName).appendText(" does not exist");
+            return false;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Settings of index ")
+            .appendValue(expectedIndexName)
+            .appendText(" should contain values: ")
+            .appendValue(expectedSettings);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexStateIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexStateIsEqualToMatcher.java
new file mode 100644
index 0000000000..87270b1388
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/IndexStateIsEqualToMatcher.java
@@ -0,0 +1,61 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.cluster.state.ClusterStateRequest;
+import org.opensearch.action.admin.cluster.state.ClusterStateResponse;
+import org.opensearch.client.Client;
+import org.opensearch.cluster.metadata.IndexMetadata;
+import org.opensearch.test.framework.cluster.LocalCluster;
+
+import static java.util.Objects.requireNonNull;
+
+class IndexStateIsEqualToMatcher extends TypeSafeDiagnosingMatcher<LocalCluster> {
+
+    private final String expectedIndexName;
+    private final IndexMetadata.State expectedState;
+
+    IndexStateIsEqualToMatcher(String expectedIndexName, IndexMetadata.State expectedState) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+        this.expectedState = requireNonNull(expectedState);
+    }
+
+    @Override
+    protected boolean matchesSafely(LocalCluster cluster, Description mismatchDescription) {
+        try (Client client = cluster.getInternalNodeClient()) {
+            ClusterStateRequest clusterStateRequest = new ClusterStateRequest().indices(expectedIndexName);
+            ClusterStateResponse clusterStateResponse = client.admin().cluster().state(clusterStateRequest).actionGet();
+
+            Map<String, IndexMetadata> indicesMetadata = clusterStateResponse.getState().getMetadata().indices();
+            if (!indicesMetadata.containsKey(expectedIndexName)) {
+                mismatchDescription.appendValue("Index does not exist");
+            }
+            IndexMetadata indexMetadata = indicesMetadata.get(expectedIndexName);
+            if (expectedState != indexMetadata.getState()) {
+                mismatchDescription.appendValue("Actual index state is equal to ").appendValue(indexMetadata.getState().name());
+                return false;
+            }
+            return true;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Index: ")
+            .appendValue(expectedIndexName)
+            .appendText(" . State should be equal to ")
+            .appendValue(expectedState.name());
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiGetResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiGetResponseMatchers.java
new file mode 100644
index 0000000000..c2e86b1310
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiGetResponseMatchers.java
@@ -0,0 +1,28 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.get.MultiGetResponse;
+
+public class MultiGetResponseMatchers {
+
+    private MultiGetResponseMatchers() {}
+
+    public static Matcher<MultiGetResponse> isSuccessfulMultiGetResponse() {
+        return new SuccessfulMultiGetResponseMatcher();
+    }
+
+    public static Matcher<MultiGetResponse> numberOfGetItemResponsesIsEqualTo(int expectedNumberOfResponses) {
+        return new NumberOfGetItemResponsesIsEqualToMatcher(expectedNumberOfResponses);
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiSearchResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiSearchResponseMatchers.java
new file mode 100644
index 0000000000..9709249d11
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/MultiSearchResponseMatchers.java
@@ -0,0 +1,28 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.search.MultiSearchResponse;
+
+public class MultiSearchResponseMatchers {
+
+    private MultiSearchResponseMatchers() {}
+
+    public static Matcher<MultiSearchResponse> isSuccessfulMultiSearchResponse() {
+        return new SuccessfulMultiSearchResponseMatcher();
+    }
+
+    public static Matcher<MultiSearchResponse> numberOfSearchItemResponsesIsEqualTo(int expectedNumberOfResponses) {
+        return new NumberOfSearchItemResponsesIsEqualToMatcher(expectedNumberOfResponses);
+    }
+
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfFieldsIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfFieldsIsEqualToMatcher.java
new file mode 100644
index 0000000000..ad8e9725c3
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfFieldsIsEqualToMatcher.java
@@ -0,0 +1,38 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
+
+class NumberOfFieldsIsEqualToMatcher extends TypeSafeDiagnosingMatcher<FieldCapabilitiesResponse> {
+
+    private final int expectedNumberOfFields;
+
+    NumberOfFieldsIsEqualToMatcher(int expectedNumberOfFields) {
+        this.expectedNumberOfFields = expectedNumberOfFields;
+    }
+
+    @Override
+    protected boolean matchesSafely(FieldCapabilitiesResponse response, Description mismatchDescription) {
+        if (expectedNumberOfFields != response.get().size()) {
+            mismatchDescription.appendText("Actual number of fields: ").appendValue(response.get().size());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Response contains information about ").appendValue(expectedNumberOfFields).appendText(" fields");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfGetItemResponsesIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfGetItemResponsesIsEqualToMatcher.java
new file mode 100644
index 0000000000..38cfaeb130
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfGetItemResponsesIsEqualToMatcher.java
@@ -0,0 +1,38 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.MultiGetResponse;
+
+class NumberOfGetItemResponsesIsEqualToMatcher extends TypeSafeDiagnosingMatcher<MultiGetResponse> {
+
+    private final int expectedNumberOfResponses;
+
+    NumberOfGetItemResponsesIsEqualToMatcher(int expectedNumberOfResponses) {
+        this.expectedNumberOfResponses = expectedNumberOfResponses;
+    }
+
+    @Override
+    protected boolean matchesSafely(MultiGetResponse response, Description mismatchDescription) {
+        if (expectedNumberOfResponses != response.getResponses().length) {
+            mismatchDescription.appendText("Actual number of responses: ").appendValue(response.getResponses().length);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Multi get response contains: ").appendValue(expectedNumberOfResponses).appendText(" item responses");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfHitsInPageIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfHitsInPageIsEqualToMatcher.java
new file mode 100644
index 0000000000..8a25a336f3
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfHitsInPageIsEqualToMatcher.java
@@ -0,0 +1,45 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHits;
+
+class NumberOfHitsInPageIsEqualToMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final int expectedNumberOfHits;
+
+    public NumberOfHitsInPageIsEqualToMatcher(int expectedNumberOfHits) {
+        this.expectedNumberOfHits = expectedNumberOfHits;
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        SearchHits hits = searchResponse.getHits();
+        if ((hits == null) || (hits.getHits() == null)) {
+            mismatchDescription.appendText("contains null hits");
+            return false;
+        }
+        int actualNumberOfHits = hits.getHits().length;
+        if (expectedNumberOfHits != actualNumberOfHits) {
+            mismatchDescription.appendText("actual number of hits is equal to ").appendValue(actualNumberOfHits);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Number of hits on current page should be equal to ").appendValue(expectedNumberOfHits);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfSearchItemResponsesIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfSearchItemResponsesIsEqualToMatcher.java
new file mode 100644
index 0000000000..54bb83cba7
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfSearchItemResponsesIsEqualToMatcher.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.MultiSearchResponse;
+
+class NumberOfSearchItemResponsesIsEqualToMatcher extends TypeSafeDiagnosingMatcher<MultiSearchResponse> {
+
+    private final int expectedNumberOfResponses;
+
+    NumberOfSearchItemResponsesIsEqualToMatcher(int expectedNumberOfResponses) {
+        this.expectedNumberOfResponses = expectedNumberOfResponses;
+    }
+
+    @Override
+    protected boolean matchesSafely(MultiSearchResponse response, Description mismatchDescription) {
+        if (expectedNumberOfResponses != response.getResponses().length) {
+            mismatchDescription.appendText("Actual number of responses: ").appendValue(response.getResponses().length);
+            return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Multi search response contains: ").appendValue(expectedNumberOfResponses).appendText(" item responses");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfTotalHitsIsEqualToMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfTotalHitsIsEqualToMatcher.java
new file mode 100644
index 0000000000..3f2b379498
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/NumberOfTotalHitsIsEqualToMatcher.java
@@ -0,0 +1,59 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Arrays;
+import java.util.stream.Collectors;
+
+import org.apache.lucene.search.TotalHits;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHits;
+
+class NumberOfTotalHitsIsEqualToMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final int expectedNumberOfHits;
+
+    NumberOfTotalHitsIsEqualToMatcher(int expectedNumberOfHits) {
+        this.expectedNumberOfHits = expectedNumberOfHits;
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        SearchHits hits = searchResponse.getHits();
+        if (hits == null) {
+            mismatchDescription.appendText("contains null hits");
+            return false;
+        }
+        TotalHits totalHits = hits.getTotalHits();
+        if (totalHits == null) {
+            mismatchDescription.appendText("Total hits number is null.");
+            return false;
+        }
+        if (expectedNumberOfHits != totalHits.value) {
+            String documentIds = Arrays.stream(searchResponse.getHits().getHits())
+                .map(hit -> hit.getIndex() + "/" + hit.getId())
+                .collect(Collectors.joining(","));
+            mismatchDescription.appendText("contains ")
+                .appendValue(hits.getHits().length)
+                .appendText(" hits, found document ids ")
+                .appendValue(documentIds);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Search response should contains ").appendValue(expectedNumberOfHits).appendText(" hits");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchExceptionMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchExceptionMatchers.java
new file mode 100644
index 0000000000..6e8519c230
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchExceptionMatchers.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.core.rest.RestStatus;
+
+import static org.hamcrest.Matchers.containsString;
+
+public class OpenSearchExceptionMatchers {
+
+    private OpenSearchExceptionMatchers() {}
+
+    public static Matcher<Throwable> statusException(RestStatus expectedRestStatus) {
+        return new OpenSearchStatusExceptionMatcher(expectedRestStatus);
+    }
+
+    public static Matcher<Throwable> errorMessage(Matcher<String> errorMessageMatcher) {
+        return new ExceptionErrorMessageMatcher(errorMessageMatcher);
+    }
+
+    public static Matcher<Throwable> errorMessageContain(String errorMessage) {
+        return errorMessage(containsString(errorMessage));
+    }
+
+    public static Matcher<Throwable> hasCause(Class<? extends Throwable> clazz) {
+        return new ExceptionHasCauseMatcher(clazz);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchStatusExceptionMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchStatusExceptionMatcher.java
new file mode 100644
index 0000000000..e8efcf151f
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/OpenSearchStatusExceptionMatcher.java
@@ -0,0 +1,52 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.core.rest.RestStatus;
+
+import static java.util.Objects.requireNonNull;
+
+class OpenSearchStatusExceptionMatcher extends TypeSafeDiagnosingMatcher<Throwable> {
+
+    private final RestStatus expectedRestStatus;
+
+    public OpenSearchStatusExceptionMatcher(RestStatus expectedRestStatus) {
+        this.expectedRestStatus = requireNonNull(expectedRestStatus, "Expected rest status is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Throwable throwable, Description mismatchDescription) {
+        if ((throwable instanceof OpenSearchException) == false) {
+            mismatchDescription.appendText("actual exception type is ")
+                .appendValue(throwable.getClass().getCanonicalName())
+                .appendText(", error message ")
+                .appendValue(throwable.getMessage());
+            return false;
+        }
+        OpenSearchException openSearchException = (OpenSearchException) throwable;
+        if (expectedRestStatus.equals(openSearchException.status()) == false) {
+            mismatchDescription.appendText("actual status code is ")
+                .appendValue(openSearchException.status())
+                .appendText(", error message ")
+                .appendValue(throwable.getMessage());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("OpenSearchException with status code ").appendValue(expectedRestStatus);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/PitResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/PitResponseMatchers.java
new file mode 100644
index 0000000000..84ab459210
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/PitResponseMatchers.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.search.CreatePitResponse;
+import org.opensearch.action.search.DeletePitResponse;
+import org.opensearch.action.search.GetAllPitNodesResponse;
+
+public class PitResponseMatchers {
+
+    private PitResponseMatchers() {}
+
+    public static Matcher<CreatePitResponse> isSuccessfulCreatePitResponse() {
+        return new SuccessfulCreatePitResponseMatcher();
+    }
+
+    public static Matcher<GetAllPitNodesResponse> getAllResponseContainsExactlyPitWithIds(String... expectedPitIds) {
+        return new GetAllPitsContainsExactlyIdsResponseMatcher(expectedPitIds);
+    }
+
+    public static Matcher<DeletePitResponse> isSuccessfulDeletePitResponse() {
+        return new SuccessfulDeletePitResponseMatcher();
+    }
+
+    public static Matcher<DeletePitResponse> deleteResponseContainsExactlyPitWithIds(String... expectedPitIds) {
+        return new DeletePitContainsExactlyIdsResponseMatcher(expectedPitIds);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitContainsFieldWithValueMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitContainsFieldWithValueMatcher.java
new file mode 100644
index 0000000000..c92924ebfe
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitContainsFieldWithValueMatcher.java
@@ -0,0 +1,74 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHit;
+
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.readTotalHits;
+
+class SearchHitContainsFieldWithValueMatcher<T> extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final int hitIndex;
+
+    private final String fieldName;
+
+    private final T expectedValue;
+
+    SearchHitContainsFieldWithValueMatcher(int hitIndex, String fieldName, T expectedValue) {
+        this.hitIndex = hitIndex;
+        this.fieldName = fieldName;
+        this.expectedValue = expectedValue;
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        Long numberOfHits = readTotalHits(searchResponse);
+        if (numberOfHits == null) {
+            mismatchDescription.appendText("Total number of hits is unknown.");
+            return false;
+        }
+        if (hitIndex >= numberOfHits) {
+            mismatchDescription.appendText("Search result contain only ").appendValue(numberOfHits).appendText(" hits");
+            return false;
+        }
+        SearchHit searchHit = searchResponse.getHits().getAt(hitIndex);
+        Map<String, Object> source = searchHit.getSourceAsMap();
+        if (source == null) {
+            mismatchDescription.appendText("Source document is null, is fetch source option set to true?");
+            return false;
+        }
+        if (source.containsKey(fieldName) == false) {
+            mismatchDescription.appendText("Document does not contain field ").appendValue(fieldName);
+            return false;
+        }
+        Object actualValue = source.get(fieldName);
+        if (!expectedValue.equals(actualValue)) {
+            mismatchDescription.appendText("Field value is equal to ").appendValue(actualValue);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Search hit with index ")
+            .appendValue(hitIndex)
+            .appendText(" should contain field ")
+            .appendValue(fieldName)
+            .appendValue(" with value equal to ")
+            .appendValue(expectedValue);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitDoesNotContainFieldMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitDoesNotContainFieldMatcher.java
new file mode 100644
index 0000000000..0562acdcbb
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitDoesNotContainFieldMatcher.java
@@ -0,0 +1,65 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Map;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHit;
+
+import static java.util.Objects.requireNonNull;
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.readTotalHits;
+
+class SearchHitDoesNotContainFieldMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final int hitIndex;
+
+    private final String fieldName;
+
+    public SearchHitDoesNotContainFieldMatcher(int hitIndex, String fieldName) {
+        this.hitIndex = hitIndex;
+        this.fieldName = requireNonNull(fieldName, "Field name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        Long numberOfHits = readTotalHits(searchResponse);
+        if (numberOfHits == null) {
+            mismatchDescription.appendText("Total number of hits is unknown.");
+            return false;
+        }
+        if (hitIndex >= numberOfHits) {
+            mismatchDescription.appendText("Search result contain only ").appendValue(numberOfHits).appendText(" hits");
+            return false;
+        }
+        SearchHit searchHit = searchResponse.getHits().getAt(hitIndex);
+        Map<String, Object> source = searchHit.getSourceAsMap();
+        if (source == null) {
+            mismatchDescription.appendText("Source document is null, is fetch source option set to true?");
+            return false;
+        }
+        if (source.containsKey(fieldName)) {
+            mismatchDescription.appendText(" document contains field ").appendValue(fieldName);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("search hit with index ")
+            .appendValue(hitIndex)
+            .appendText(" does not contain field ")
+            .appendValue(fieldName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentWithIdMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentWithIdMatcher.java
new file mode 100644
index 0000000000..d21df7a578
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentWithIdMatcher.java
@@ -0,0 +1,64 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHit;
+
+import static org.opensearch.test.framework.matcher.SearchResponseMatchers.readTotalHits;
+
+class SearchHitsContainDocumentWithIdMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final int hitIndex;
+    private final String indexName;
+    private final String id;
+
+    public SearchHitsContainDocumentWithIdMatcher(int hitIndex, String indexName, String id) {
+        this.hitIndex = hitIndex;
+        this.indexName = indexName;
+        this.id = id;
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        Long numberOfHits = readTotalHits(searchResponse);
+        if (numberOfHits == null) {
+            mismatchDescription.appendText("Number of total hits is unknown.");
+            return false;
+        }
+        if (hitIndex >= numberOfHits) {
+            mismatchDescription.appendText("Search result contain only ").appendValue(numberOfHits).appendText(" hits");
+            return false;
+        }
+        SearchHit searchHit = searchResponse.getHits().getAt(hitIndex);
+        if (indexName.equals(searchHit.getIndex()) == false) {
+            mismatchDescription.appendText("document is part of another index ").appendValue(indexName);
+            return false;
+        }
+        if (id.equals(searchHit.getId()) == false) {
+            mismatchDescription.appendText("Document has another id which is ").appendValue(searchHit.getId());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Search hit with index ")
+            .appendValue(hitIndex)
+            .appendText(" should contains document which is part of index ")
+            .appendValue(indexName)
+            .appendValue(" and has id ")
+            .appendValue(id);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentsInAnyOrderMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentsInAnyOrderMatcher.java
new file mode 100644
index 0000000000..28bf13e321
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchHitsContainDocumentsInAnyOrderMatcher.java
@@ -0,0 +1,76 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.tuple.Pair;
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.search.SearchHit;
+import org.opensearch.search.SearchHits;
+
+import static java.util.Objects.requireNonNull;
+
+class SearchHitsContainDocumentsInAnyOrderMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    /**
+    * Pair contain index name and document id
+    */
+    private final List<Pair<String, String>> documentIds;
+
+    /**
+    *
+    * @param documentIds Pair contain index name and document id
+    */
+    public SearchHitsContainDocumentsInAnyOrderMatcher(List<Pair<String, String>> documentIds) {
+        this.documentIds = requireNonNull(documentIds, "Document ids are required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse response, Description mismatchDescription) {
+        SearchHits hits = response.getHits();
+        if (hits == null) {
+            mismatchDescription.appendText("Search response does not contains hits (null).");
+            return false;
+        }
+        SearchHit[] hitsArray = hits.getHits();
+        if (hitsArray == null) {
+            mismatchDescription.appendText("Search hits array is null");
+            return false;
+        }
+        Set<Pair<String, String>> actualDocumentIds = Arrays.stream(hitsArray)
+            .map(result -> Pair.of(result.getIndex(), result.getId()))
+            .collect(Collectors.toSet());
+        for (Pair<String, String> desiredDocumentId : documentIds) {
+            if (actualDocumentIds.contains(desiredDocumentId) == false) {
+                mismatchDescription.appendText("search result does not contain document with id ")
+                    .appendValue(desiredDocumentId.getKey())
+                    .appendText("/")
+                    .appendValue(desiredDocumentId.getValue());
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        String documentIdsString = documentIds.stream()
+            .map(pair -> pair.getKey() + "/" + pair.getValue())
+            .collect(Collectors.joining(", "));
+        description.appendText("Search response should contains following documents ").appendValue(documentIdsString);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseMatchers.java
new file mode 100644
index 0000000000..cf3a6d9e57
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseMatchers.java
@@ -0,0 +1,87 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.commons.lang3.tuple.Pair;
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.search.SearchHits;
+
+public class SearchResponseMatchers {
+
+    private SearchResponseMatchers() {}
+
+    public static Matcher<SearchResponse> isSuccessfulSearchResponse() {
+        return new SuccessfulSearchResponseMatcher();
+    }
+
+    public static Matcher<SearchResponse> numberOfTotalHitsIsEqualTo(int expectedNumberOfHits) {
+        return new NumberOfTotalHitsIsEqualToMatcher(expectedNumberOfHits);
+    }
+
+    public static Matcher<SearchResponse> numberOfHitsInPageIsEqualTo(int expectedNumberOfHits) {
+        return new NumberOfHitsInPageIsEqualToMatcher(expectedNumberOfHits);
+    }
+
+    public static <T> Matcher<SearchResponse> searchHitContainsFieldWithValue(int hitIndex, String fieldName, T expectedValue) {
+        return new SearchHitContainsFieldWithValueMatcher<>(hitIndex, fieldName, expectedValue);
+    }
+
+    public static Matcher<SearchResponse> searchHitDoesNotContainField(int hitIndex, String fieldName) {
+        return new SearchHitDoesNotContainFieldMatcher(hitIndex, fieldName);
+    }
+
+    public static Matcher<SearchResponse> searchHitsContainDocumentWithId(int hitIndex, String indexName, String documentId) {
+        return new SearchHitsContainDocumentWithIdMatcher(hitIndex, indexName, documentId);
+    }
+
+    public static Matcher<SearchResponse> restStatusIs(RestStatus expectedRestStatus) {
+        return new SearchResponseWithStatusCodeMatcher(expectedRestStatus);
+    }
+
+    public static Matcher<SearchResponse> containNotEmptyScrollingId() {
+        return new ContainNotEmptyScrollingIdMatcher();
+    }
+
+    public static Matcher<SearchResponse> containAggregationWithNameAndType(
+        String expectedAggregationName,
+        String expectedAggregationType
+    ) {
+        return new ContainsAggregationWithNameAndTypeMatcher(expectedAggregationName, expectedAggregationType);
+    }
+
+    /**
+    * Matcher checks if search result contains all expected documents
+    *
+    * @param documentIds Pair contain index name and document id
+    * @return matcher
+    */
+    public static Matcher<SearchResponse> searchHitsContainDocumentsInAnyOrder(List<Pair<String, String>> documentIds) {
+        return new SearchHitsContainDocumentsInAnyOrderMatcher(documentIds);
+    }
+
+    public static Matcher<SearchResponse> searchHitsContainDocumentsInAnyOrder(Pair<String, String>... documentIds) {
+        return new SearchHitsContainDocumentsInAnyOrderMatcher(Arrays.asList(documentIds));
+    }
+
+    static Long readTotalHits(SearchResponse searchResponse) {
+        return Optional.ofNullable(searchResponse)
+            .map(SearchResponse::getHits)
+            .map(SearchHits::getTotalHits)
+            .map(totalHits -> totalHits.value)
+            .orElse(null);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseWithStatusCodeMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseWithStatusCodeMatcher.java
new file mode 100644
index 0000000000..5b0f99e11b
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SearchResponseWithStatusCodeMatcher.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SearchResponseWithStatusCodeMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    private final RestStatus expectedRestStatus;
+
+    public SearchResponseWithStatusCodeMatcher(RestStatus expectedRestStatus) {
+        this.expectedRestStatus = expectedRestStatus;
+    }
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        if (expectedRestStatus.equals(searchResponse.status()) == false) {
+            mismatchDescription.appendText("actual response status is ").appendValue(searchResponse.status());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Expected response status is ").appendValue(expectedRestStatus);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SnapshotInClusterDoesNotExist.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SnapshotInClusterDoesNotExist.java
new file mode 100644
index 0000000000..36e50143f0
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SnapshotInClusterDoesNotExist.java
@@ -0,0 +1,49 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
+import org.opensearch.client.Client;
+import org.opensearch.snapshots.SnapshotMissingException;
+
+import static java.util.Objects.requireNonNull;
+
+class SnapshotInClusterDoesNotExist extends TypeSafeDiagnosingMatcher<Client> {
+    private final String repositoryName;
+    private final String snapshotName;
+
+    public SnapshotInClusterDoesNotExist(String repositoryName, String snapshotName) {
+        this.repositoryName = requireNonNull(repositoryName, "Snapshot repository name is required.");
+        this.snapshotName = requireNonNull(snapshotName, "Snapshot name is required.");
+    }
+
+    @Override
+    protected boolean matchesSafely(Client client, Description mismatchDescription) {
+        try {
+            GetSnapshotsRequest request = new GetSnapshotsRequest(repositoryName, new String[] { snapshotName });
+            client.admin().cluster().getSnapshots(request).actionGet();
+            mismatchDescription.appendText("snapshot exists");
+            return false;
+        } catch (SnapshotMissingException e) {
+            return true;
+        }
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Snapshot ")
+            .appendValue(snapshotName)
+            .appendText(" does not exist in repository ")
+            .appendValue(repositoryName);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessBulkResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessBulkResponseMatcher.java
new file mode 100644
index 0000000000..ca4b94d148
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessBulkResponseMatcher.java
@@ -0,0 +1,47 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import java.util.Arrays;
+import java.util.stream.Collectors;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.bulk.BulkItemResponse;
+import org.opensearch.action.bulk.BulkResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessBulkResponseMatcher extends TypeSafeDiagnosingMatcher<BulkResponse> {
+
+    @Override
+    protected boolean matchesSafely(BulkResponse response, Description mismatchDescription) {
+        RestStatus status = response.status();
+        if (RestStatus.OK.equals(status) == false) {
+            mismatchDescription.appendText("incorrect response status ").appendValue(status);
+            return false;
+        }
+        if (response.hasFailures()) {
+            String failureDescription = Arrays.stream(response.getItems())
+                .filter(BulkItemResponse::isFailed)
+                .map(BulkItemResponse::getFailure)
+                .map(Object::toString)
+                .collect(Collectors.joining(",\n"));
+            mismatchDescription.appendText("bulk response contains failures ").appendValue(failureDescription);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("success bulk response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulClearIndicesCacheResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulClearIndicesCacheResponseMatcher.java
new file mode 100644
index 0000000000..b70b2c2f9e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulClearIndicesCacheResponseMatcher.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulClearIndicesCacheResponseMatcher extends TypeSafeDiagnosingMatcher<ClearIndicesCacheResponse> {
+
+    @Override
+    protected boolean matchesSafely(ClearIndicesCacheResponse response, Description mismatchDescription) {
+        if (!RestStatus.OK.equals(response.getStatus())) {
+            mismatchDescription.appendText("Status is equal to ").appendValue(response.getStatus());
+            return false;
+        }
+        if (response.getShardFailures().length != 0) {
+            mismatchDescription.appendText("Contains ").appendValue(response.getShardFailures().length).appendText(" shard failures");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful clear index cache response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCloseIndexResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCloseIndexResponseMatcher.java
new file mode 100644
index 0000000000..480b7845e9
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCloseIndexResponseMatcher.java
@@ -0,0 +1,36 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.client.indices.CloseIndexResponse;
+
+class SuccessfulCloseIndexResponseMatcher extends TypeSafeDiagnosingMatcher<CloseIndexResponse> {
+
+    @Override
+    protected boolean matchesSafely(CloseIndexResponse response, Description mismatchDescription) {
+        if (!response.isShardsAcknowledged()) {
+            mismatchDescription.appendText("shardsAcknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        if (!response.isAcknowledged()) {
+            mismatchDescription.appendText("acknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful close index response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreateIndexResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreateIndexResponseMatcher.java
new file mode 100644
index 0000000000..810c93e034
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreateIndexResponseMatcher.java
@@ -0,0 +1,51 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.client.indices.CreateIndexResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class SuccessfulCreateIndexResponseMatcher extends TypeSafeDiagnosingMatcher<CreateIndexResponse> {
+
+    private final String expectedIndexName;
+
+    SuccessfulCreateIndexResponseMatcher(String expectedIndexName) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+    }
+
+    @Override
+    protected boolean matchesSafely(CreateIndexResponse response, Description mismatchDescription) {
+        if (!expectedIndexName.equals(response.index())) {
+            mismatchDescription.appendText("Index name ")
+                .appendValue(response.index())
+                .appendText(" does not match expected index name ")
+                .appendValue(expectedIndexName);
+            return false;
+        }
+        if (!response.isShardsAcknowledged()) {
+            mismatchDescription.appendText("shardsAcknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        if (!response.isAcknowledged()) {
+            mismatchDescription.appendText("acknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful create index response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreatePitResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreatePitResponseMatcher.java
new file mode 100644
index 0000000000..66b59b1526
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulCreatePitResponseMatcher.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.CreatePitResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulCreatePitResponseMatcher extends TypeSafeDiagnosingMatcher<CreatePitResponse> {
+
+    @Override
+    protected boolean matchesSafely(CreatePitResponse response, Description mismatchDescription) {
+        if (!RestStatus.OK.equals(response.status())) {
+            mismatchDescription.appendText("has status ").appendValue(response.status()).appendText(" which denotes failure.");
+            return false;
+        }
+        if (response.getShardFailures().length != 0) {
+            mismatchDescription.appendText("contains ").appendValue(response.getShardFailures().length).appendText(" shard failures");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful create pit response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeletePitResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeletePitResponseMatcher.java
new file mode 100644
index 0000000000..20906946f1
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeletePitResponseMatcher.java
@@ -0,0 +1,42 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.DeletePitInfo;
+import org.opensearch.action.search.DeletePitResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulDeletePitResponseMatcher extends TypeSafeDiagnosingMatcher<DeletePitResponse> {
+
+    @Override
+    protected boolean matchesSafely(DeletePitResponse response, Description mismatchDescription) {
+        if (!RestStatus.OK.equals(response.status())) {
+            mismatchDescription.appendText("has status ").appendValue(response.status()).appendText(" which denotes failure.");
+            return false;
+        }
+        for (DeletePitInfo deletePitInfo : response.getDeletePitResults()) {
+            if (!deletePitInfo.isSuccessful()) {
+                mismatchDescription.appendValue("Pit: ")
+                    .appendValue(deletePitInfo.getPitId())
+                    .appendText(" - delete result was not successful");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful delete pit response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeleteResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeleteResponseMatcher.java
new file mode 100644
index 0000000000..6c10b2b6f8
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulDeleteResponseMatcher.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.delete.DeleteResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulDeleteResponseMatcher extends TypeSafeDiagnosingMatcher<DeleteResponse> {
+
+    @Override
+    protected boolean matchesSafely(DeleteResponse response, Description mismatchDescription) {
+        if (!RestStatus.OK.equals(response.status())) {
+            mismatchDescription.appendText("has status ").appendValue(response.status()).appendText(" which denotes failure.");
+            return false;
+        }
+        if (response.getShardInfo().getFailures().length != 0) {
+            mismatchDescription.appendText("contains ")
+                .appendValue(response.getShardInfo().getFailures().length)
+                .appendText(" shard failures");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful delete response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiGetResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiGetResponseMatcher.java
new file mode 100644
index 0000000000..fce5d1201c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiGetResponseMatcher.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.get.MultiGetItemResponse;
+import org.opensearch.action.get.MultiGetResponse;
+
+class SuccessfulMultiGetResponseMatcher extends TypeSafeDiagnosingMatcher<MultiGetResponse> {
+
+    @Override
+    protected boolean matchesSafely(MultiGetResponse response, Description mismatchDescription) {
+        for (MultiGetItemResponse getItemResponse : response.getResponses()) {
+            if (getItemResponse.isFailed()) {
+                mismatchDescription.appendValue("Get an item from index: ")
+                    .appendValue(getItemResponse.getIndex())
+                    .appendText(" failed: ")
+                    .appendValue(getItemResponse.getFailure().getMessage());
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful multi get response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiSearchResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiSearchResponseMatcher.java
new file mode 100644
index 0000000000..e601f16e8e
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulMultiSearchResponseMatcher.java
@@ -0,0 +1,35 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.MultiSearchResponse;
+
+class SuccessfulMultiSearchResponseMatcher extends TypeSafeDiagnosingMatcher<MultiSearchResponse> {
+
+    @Override
+    protected boolean matchesSafely(MultiSearchResponse response, Description mismatchDescription) {
+        for (MultiSearchResponse.Item itemResponse : response.getResponses()) {
+            if (itemResponse.isFailure()) {
+                mismatchDescription.appendValue("Get an item failed: ").appendValue(itemResponse.getFailureMessage());
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful multi search response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulOpenIndexResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulOpenIndexResponseMatcher.java
new file mode 100644
index 0000000000..68389979b1
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulOpenIndexResponseMatcher.java
@@ -0,0 +1,36 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.admin.indices.open.OpenIndexResponse;
+
+class SuccessfulOpenIndexResponseMatcher extends TypeSafeDiagnosingMatcher<OpenIndexResponse> {
+
+    @Override
+    protected boolean matchesSafely(OpenIndexResponse response, Description mismatchDescription) {
+        if (!response.isShardsAcknowledged()) {
+            mismatchDescription.appendText("shardsAcknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        if (!response.isAcknowledged()) {
+            mismatchDescription.appendText("acknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful open index response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulResizeResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulResizeResponseMatcher.java
new file mode 100644
index 0000000000..915a0f39bb
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulResizeResponseMatcher.java
@@ -0,0 +1,51 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.client.indices.ResizeResponse;
+
+import static java.util.Objects.requireNonNull;
+
+class SuccessfulResizeResponseMatcher extends TypeSafeDiagnosingMatcher<ResizeResponse> {
+
+    private final String expectedIndexName;
+
+    SuccessfulResizeResponseMatcher(String expectedIndexName) {
+        this.expectedIndexName = requireNonNull(expectedIndexName);
+    }
+
+    @Override
+    protected boolean matchesSafely(ResizeResponse response, Description mismatchDescription) {
+        if (!expectedIndexName.equals(response.index())) {
+            mismatchDescription.appendText("Index name ")
+                .appendValue(response.index())
+                .appendText(" does not match expected index name ")
+                .appendValue(expectedIndexName);
+            return false;
+        }
+        if (!response.isShardsAcknowledged()) {
+            mismatchDescription.appendText("shardsAcknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        if (!response.isAcknowledged()) {
+            mismatchDescription.appendText("acknowledged is equal to ").appendValue(response.isShardsAcknowledged());
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful create index response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulSearchResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulSearchResponseMatcher.java
new file mode 100644
index 0000000000..21017a9014
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulSearchResponseMatcher.java
@@ -0,0 +1,37 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulSearchResponseMatcher extends TypeSafeDiagnosingMatcher<SearchResponse> {
+
+    @Override
+    protected boolean matchesSafely(SearchResponse searchResponse, Description mismatchDescription) {
+        if (RestStatus.OK.equals(searchResponse.status()) == false) {
+            mismatchDescription.appendText("has status ").appendValue(searchResponse.status()).appendText(" which denotes failure.");
+            return false;
+        }
+        if (searchResponse.getShardFailures().length != 0) {
+            mismatchDescription.appendText("contains ").appendValue(searchResponse.getShardFailures().length).appendText(" shard failures");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful search response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulUpdateResponseMatcher.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulUpdateResponseMatcher.java
new file mode 100644
index 0000000000..14faab0c4c
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/SuccessfulUpdateResponseMatcher.java
@@ -0,0 +1,39 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Description;
+import org.hamcrest.TypeSafeDiagnosingMatcher;
+
+import org.opensearch.action.update.UpdateResponse;
+import org.opensearch.core.rest.RestStatus;
+
+class SuccessfulUpdateResponseMatcher extends TypeSafeDiagnosingMatcher<UpdateResponse> {
+
+    @Override
+    protected boolean matchesSafely(UpdateResponse response, Description mismatchDescription) {
+        if (!RestStatus.OK.equals(response.status())) {
+            mismatchDescription.appendText("has status ").appendValue(response.status()).appendText(" which denotes failure.");
+            return false;
+        }
+        if (response.getShardInfo().getFailures().length != 0) {
+            mismatchDescription.appendText("contains ")
+                .appendValue(response.getShardInfo().getFailures().length)
+                .appendText(" shard failures");
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+        description.appendText("Successful update response");
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/matcher/UpdateResponseMatchers.java b/src/integrationTest/java/org/opensearch/test/framework/matcher/UpdateResponseMatchers.java
new file mode 100644
index 0000000000..ee01fabced
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/matcher/UpdateResponseMatchers.java
@@ -0,0 +1,23 @@
+/*
+* Copyright OpenSearch Contributors
+* SPDX-License-Identifier: Apache-2.0
+*
+* The OpenSearch Contributors require contributions made to
+* this file be licensed under the Apache-2.0 license or a
+* compatible open source license.
+*
+*/
+package org.opensearch.test.framework.matcher;
+
+import org.hamcrest.Matcher;
+
+import org.opensearch.action.update.UpdateResponse;
+
+public class UpdateResponseMatchers {
+
+    private UpdateResponseMatchers() {}
+
+    public static Matcher<UpdateResponse> isSuccessfulUpdateResponse() {
+        return new SuccessfulUpdateResponseMatcher();
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/AbstractRestHandler.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/AbstractRestHandler.java
new file mode 100644
index 0000000000..764173b5b9
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/AbstractRestHandler.java
@@ -0,0 +1,53 @@
+package org.opensearch.test.framework.testplugins;
+
+import org.opensearch.ExceptionsHelper;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.rest.BaseRestHandler;
+import org.opensearch.rest.BytesRestResponse;
+import org.opensearch.rest.RestChannel;
+import org.opensearch.rest.RestRequest;
+
+import java.io.IOException;
+
+public class AbstractRestHandler extends BaseRestHandler {
+
+    @Override
+    public String getName() {
+        return getClass().getSimpleName();
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+        switch (request.method()) {
+            case GET:
+                return channel -> handleGet(channel, request, client);
+            case POST:
+                return channel -> handlePost(channel, request, client);
+            default:
+                throw new IllegalArgumentException(request.method() + " not supported");
+        }
+    }
+
+    private void notImplemented(RestChannel channel, RestRequest.Method method) {
+        try {
+            final XContentBuilder builder = channel.newBuilder();
+            builder.startObject();
+            builder.field("status", RestStatus.NOT_IMPLEMENTED.name());
+            builder.field("message", "Method " + method + " not implemented.");
+            builder.endObject();
+            channel.sendResponse(new BytesRestResponse(RestStatus.NOT_IMPLEMENTED, builder));
+        } catch (IOException e) {
+            throw ExceptionsHelper.convertToOpenSearchException(e);
+        }
+    }
+
+    public void handlePost(RestChannel channel, RestRequest request, NodeClient client) {
+        notImplemented(channel, request.method());
+    }
+
+    public void handleGet(RestChannel channel, RestRequest request, NodeClient client) {
+        notImplemented(channel, request.method());
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/CustomLegacyTestPlugin.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/CustomLegacyTestPlugin.java
new file mode 100644
index 0000000000..648abef704
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/CustomLegacyTestPlugin.java
@@ -0,0 +1,61 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy;
+
+import org.opensearch.action.ActionRequest;
+import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
+import org.opensearch.cluster.node.DiscoveryNodes;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.IndexScopedSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsFilter;
+import org.opensearch.core.action.ActionResponse;
+import org.opensearch.plugins.ActionPlugin;
+import org.opensearch.plugins.ClusterPlugin;
+import org.opensearch.plugins.NetworkPlugin;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.rest.RestController;
+import org.opensearch.rest.RestHandler;
+import org.opensearch.test.framework.testplugins.dummy.dummyaction.DummyAction;
+import org.opensearch.test.framework.testplugins.dummy.dummyaction.TransportDummyAction;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Supplier;
+
+/**
+ * Registers a plugin with legacy routes using {@link org.opensearch.rest.RestHandler.Route}
+ */
+public class CustomLegacyTestPlugin extends Plugin implements ClusterPlugin, NetworkPlugin, ActionPlugin {
+
+    @Override
+    public List<RestHandler> getRestHandlers(
+        Settings settings,
+        RestController restController,
+        ClusterSettings clusterSettings,
+        IndexScopedSettings indexScopedSettings,
+        SettingsFilter settingsFilter,
+        IndexNameExpressionResolver indexNameExpressionResolver,
+        Supplier<DiscoveryNodes> nodesInCluster
+    ) {
+        final List<RestHandler> handlers = new ArrayList<RestHandler>(1);
+        handlers.add(new LegacyRestHandler());
+        return handlers;
+    }
+
+    @Override
+    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
+        List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>(1);
+        actions.add(new ActionHandler<>(DummyAction.INSTANCE, TransportDummyAction.class));
+        return actions;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/LegacyRestHandler.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/LegacyRestHandler.java
new file mode 100644
index 0000000000..e001628596
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/LegacyRestHandler.java
@@ -0,0 +1,56 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy;
+
+import com.google.common.collect.ImmutableList;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.rest.RestChannel;
+import org.opensearch.rest.RestRequest;
+import org.opensearch.rest.action.RestStatusToXContentListener;
+import org.opensearch.test.framework.testplugins.AbstractRestHandler;
+import org.opensearch.test.framework.testplugins.dummy.dummyaction.DummyAction;
+import org.opensearch.test.framework.testplugins.dummy.dummyaction.DummyRequest;
+
+import java.util.List;
+
+import static org.opensearch.rest.RestRequest.Method.GET;
+import static org.opensearch.rest.RestRequest.Method.POST;
+import static org.opensearch.security.dlic.rest.support.Utils.addRoutesPrefix;
+
+public class LegacyRestHandler extends AbstractRestHandler {
+
+    private static final List<Route> routes = addRoutesPrefix(
+        ImmutableList.of(new Route(POST, "/dummy"), new Route(GET, "/dummy")),
+        "/_plugins/_dummy"
+    );
+
+    public LegacyRestHandler() {
+        super();
+    }
+
+    @Override
+    public List<Route> routes() {
+        return routes;
+    }
+
+    @Override
+    public String getName() {
+        return "Dummy Rest Action";
+    }
+
+    @Override
+    public void handleGet(RestChannel channel, RestRequest request, NodeClient client) {
+        String message = request.param("message");
+        DummyRequest dummyRequest = new DummyRequest(message);
+        client.execute(DummyAction.INSTANCE, dummyRequest, new RestStatusToXContentListener<>(channel));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyAction.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyAction.java
new file mode 100644
index 0000000000..ff10f0ca74
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyAction.java
@@ -0,0 +1,24 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy.dummyaction;
+
+import org.opensearch.action.ActionType;
+
+public class DummyAction extends ActionType<DummyResponse> {
+
+    public static final DummyAction INSTANCE = new DummyAction();
+    public static final String NAME = "cluster:admin/dummy_plugin/dummy";
+
+    protected DummyAction() {
+        super(NAME, DummyResponse::new);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyRequest.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyRequest.java
new file mode 100644
index 0000000000..5928b4892f
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyRequest.java
@@ -0,0 +1,46 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy.dummyaction;
+
+import org.opensearch.action.ActionRequest;
+import org.opensearch.action.ActionRequestValidationException;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class DummyRequest extends ActionRequest implements ToXContent {
+
+    private final String message;
+
+    public DummyRequest(final StreamInput in) throws IOException {
+        super(in);
+        message = in.readString();
+    }
+
+    public DummyRequest(String message) {
+        this.message = message;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.field("message", message);
+
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyResponse.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyResponse.java
new file mode 100644
index 0000000000..e2ee6c9344
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/DummyResponse.java
@@ -0,0 +1,59 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy.dummyaction;
+
+import org.opensearch.common.xcontent.StatusToXContentObject;
+import org.opensearch.core.action.ActionResponse;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class DummyResponse extends ActionResponse implements StatusToXContentObject {
+    private final String responseString;
+
+    public DummyResponse(String responseString) {
+        this.responseString = responseString;
+    }
+
+    public DummyResponse(StreamInput in) throws IOException {
+        super(in);
+        responseString = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(responseString);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("response_string", responseString);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(MediaTypeRegistry.JSON, this, true, true);
+    }
+
+    @Override
+    public RestStatus status() {
+        return RestStatus.OK;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/TransportDummyAction.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/TransportDummyAction.java
new file mode 100644
index 0000000000..7e30af64bd
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummy/dummyaction/TransportDummyAction.java
@@ -0,0 +1,33 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummy.dummyaction;
+
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.HandledTransportAction;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.action.ActionListener;
+import org.opensearch.tasks.Task;
+import org.opensearch.transport.TransportService;
+
+public class TransportDummyAction extends HandledTransportAction<DummyRequest, DummyResponse> {
+
+    @Inject
+    public TransportDummyAction(final TransportService transportService, final ActionFilters actionFilters) {
+        super(DummyAction.NAME, transportService, actionFilters, DummyRequest::new);
+    }
+
+    @Override
+    protected void doExecute(Task task, DummyRequest request, ActionListener<DummyResponse> listener) {
+        String responseString = "Hello from dummy plugin";
+        listener.onResponse(new DummyResponse(responseString));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/CustomRestProtectedTestPlugin.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/CustomRestProtectedTestPlugin.java
new file mode 100644
index 0000000000..780bee4ac6
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/CustomRestProtectedTestPlugin.java
@@ -0,0 +1,62 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected;
+
+import org.opensearch.action.ActionRequest;
+import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
+import org.opensearch.cluster.node.DiscoveryNodes;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.IndexScopedSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsFilter;
+import org.opensearch.core.action.ActionResponse;
+import org.opensearch.plugins.ActionPlugin;
+import org.opensearch.plugins.ClusterPlugin;
+import org.opensearch.plugins.NetworkPlugin;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.rest.RestController;
+import org.opensearch.rest.RestHandler;
+import org.opensearch.test.framework.testplugins.dummyprotected.dummyaction.DummyAction;
+import org.opensearch.test.framework.testplugins.dummyprotected.dummyaction.TransportDummyAction;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Supplier;
+
+/**
+ * Registers a plugin with protected routes using {@linkplain org.opensearch.rest.NamedRoute}
+ * This allows authorization against REST layer
+ */
+public class CustomRestProtectedTestPlugin extends Plugin implements ClusterPlugin, NetworkPlugin, ActionPlugin {
+
+    @Override
+    public List<RestHandler> getRestHandlers(
+        Settings settings,
+        RestController restController,
+        ClusterSettings clusterSettings,
+        IndexScopedSettings indexScopedSettings,
+        SettingsFilter settingsFilter,
+        IndexNameExpressionResolver indexNameExpressionResolver,
+        Supplier<DiscoveryNodes> nodesInCluster
+    ) {
+        final List<RestHandler> handlers = new ArrayList<RestHandler>(1);
+        handlers.add(new ProtectedRoutesRestHandler());
+        return handlers;
+    }
+
+    @Override
+    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
+        List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>(1);
+        actions.add(new ActionHandler<>(DummyAction.INSTANCE, TransportDummyAction.class));
+        return actions;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/ProtectedRoutesRestHandler.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/ProtectedRoutesRestHandler.java
new file mode 100644
index 0000000000..5f20585cd6
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/ProtectedRoutesRestHandler.java
@@ -0,0 +1,70 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected;
+
+import java.util.List;
+import java.util.Set;
+
+import com.google.common.collect.ImmutableList;
+
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.rest.NamedRoute;
+import org.opensearch.rest.RestChannel;
+import org.opensearch.rest.RestRequest;
+import org.opensearch.rest.action.RestStatusToXContentListener;
+import org.opensearch.test.framework.testplugins.AbstractRestHandler;
+import org.opensearch.test.framework.testplugins.dummyprotected.dummyaction.DummyAction;
+import org.opensearch.test.framework.testplugins.dummyprotected.dummyaction.DummyRequest;
+
+import static org.opensearch.rest.RestRequest.Method.GET;
+import static org.opensearch.rest.RestRequest.Method.POST;
+import static org.opensearch.security.dlic.rest.support.Utils.addRoutesPrefix;
+
+public class ProtectedRoutesRestHandler extends AbstractRestHandler {
+
+    private static final List<Route> routes = addRoutesPrefix(
+        ImmutableList.of(
+            new NamedRoute.Builder().method(POST)
+                .path("/dummy")
+                .uniqueName("security:dummy_protected/post")
+                .legacyActionNames(Set.of("cluster:admin/dummy_protected_plugin/dummy/post"))
+                .build(),
+            new NamedRoute.Builder().method(GET)
+                .path("/dummy")
+                .uniqueName("security:dummy_protected/get")
+                .legacyActionNames(Set.of("cluster:admin/dummy_protected_plugin/dummy/get"))
+                .build()
+        ),
+        "/_plugins/_dummy_protected"
+    );
+
+    public ProtectedRoutesRestHandler() {
+        super();
+    }
+
+    @Override
+    public List<Route> routes() {
+        return routes;
+    }
+
+    @Override
+    public String getName() {
+        return "Dummy Protected Rest Action";
+    }
+
+    @Override
+    public void handleGet(RestChannel channel, RestRequest request, NodeClient client) {
+        String message = request.param("message");
+        DummyRequest dummyRequest = new DummyRequest(message);
+        client.execute(DummyAction.INSTANCE, dummyRequest, new RestStatusToXContentListener<>(channel));
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyAction.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyAction.java
new file mode 100644
index 0000000000..26edfa4e17
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyAction.java
@@ -0,0 +1,24 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected.dummyaction;
+
+import org.opensearch.action.ActionType;
+
+public class DummyAction extends ActionType<DummyResponse> {
+
+    public static final DummyAction INSTANCE = new DummyAction();
+    public static final String NAME = "cluster:admin/dummy_protected_plugin/dummy/get";
+
+    protected DummyAction() {
+        super(NAME, DummyResponse::new);
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyRequest.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyRequest.java
new file mode 100644
index 0000000000..025d2e1c55
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyRequest.java
@@ -0,0 +1,45 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected.dummyaction;
+
+import org.opensearch.action.ActionRequest;
+import org.opensearch.action.ActionRequestValidationException;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class DummyRequest extends ActionRequest implements ToXContent {
+
+    private final String message;
+
+    public DummyRequest(final StreamInput in) throws IOException {
+        super(in);
+        message = in.readString();
+    }
+
+    public DummyRequest(String message) {
+        this.message = message;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        return null;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException {
+        xContentBuilder.field("message", message);
+        return xContentBuilder;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyResponse.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyResponse.java
new file mode 100644
index 0000000000..efd7be49b4
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/DummyResponse.java
@@ -0,0 +1,59 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected.dummyaction;
+
+import org.opensearch.common.xcontent.StatusToXContentObject;
+import org.opensearch.core.action.ActionResponse;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+public class DummyResponse extends ActionResponse implements StatusToXContentObject {
+    private final String responseString;
+
+    public DummyResponse(String responseString) {
+        this.responseString = responseString;
+    }
+
+    public DummyResponse(StreamInput in) throws IOException {
+        super(in);
+        responseString = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(responseString);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("response_string", this.responseString);
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(MediaTypeRegistry.JSON, this, true, true);
+    }
+
+    @Override
+    public RestStatus status() {
+        return RestStatus.OK;
+    }
+}
diff --git a/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/TransportDummyAction.java b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/TransportDummyAction.java
new file mode 100644
index 0000000000..05a10d875a
--- /dev/null
+++ b/src/integrationTest/java/org/opensearch/test/framework/testplugins/dummyprotected/dummyaction/TransportDummyAction.java
@@ -0,0 +1,33 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.test.framework.testplugins.dummyprotected.dummyaction;
+
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.HandledTransportAction;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.action.ActionListener;
+import org.opensearch.tasks.Task;
+import org.opensearch.transport.TransportService;
+
+public class TransportDummyAction extends HandledTransportAction<DummyRequest, DummyResponse> {
+
+    @Inject
+    public TransportDummyAction(final TransportService transportService, final ActionFilters actionFilters) {
+        super(DummyAction.NAME, transportService, actionFilters, DummyRequest::new);
+    }
+
+    @Override
+    protected void doExecute(Task task, DummyRequest request, ActionListener<DummyResponse> listener) {
+        String responseString = "Hello from dummy protected plugin";
+        listener.onResponse(new DummyResponse(responseString));
+    }
+}
diff --git a/src/integrationTest/resources/action_groups.yml b/src/integrationTest/resources/action_groups.yml
new file mode 100644
index 0000000000..32188f69d0
--- /dev/null
+++ b/src/integrationTest/resources/action_groups.yml
@@ -0,0 +1,4 @@
+---
+_meta:
+  type: "actiongroups"
+  config_version: 2
diff --git a/src/integrationTest/resources/allowlist.yml b/src/integrationTest/resources/allowlist.yml
new file mode 100644
index 0000000000..d1b4540d6d
--- /dev/null
+++ b/src/integrationTest/resources/allowlist.yml
@@ -0,0 +1,4 @@
+---
+_meta:
+  type: "allowlist"
+  config_version: 2
diff --git a/src/integrationTest/resources/config.yml b/src/integrationTest/resources/config.yml
new file mode 100644
index 0000000000..17aeb1881d
--- /dev/null
+++ b/src/integrationTest/resources/config.yml
@@ -0,0 +1,22 @@
+---
+_meta:
+  type: "config"
+  config_version: 2
+config:
+  dynamic:
+    authc:
+      basic:
+        http_enabled: true
+        order: 0
+        http_authenticator:
+          type: "basic"
+          challenge: true
+          config: {}
+        authentication_backend:
+          type: "internal"
+          config: {}
+    on_behalf_of:
+      # The decoded signing key is: This is the jwt signing key for an on behalf of token authentication backend for testing of extensions
+      # The decoded encryption key is: encryptionKey
+      signing_key: "VGhpcyBpcyB0aGUgand0IHNpZ25pbmcga2V5IGZvciBhbiBvbiBiZWhhbGYgb2YgdG9rZW4gYXV0aGVudGljYXRpb24gYmFja2VuZCBmb3IgdGVzdGluZyBvZiBleHRlbnNpb25z"
+      encryption_key: "ZW5jcnlwdGlvbktleQ=="
diff --git a/src/integrationTest/resources/internal_users.yml b/src/integrationTest/resources/internal_users.yml
new file mode 100644
index 0000000000..866a879165
--- /dev/null
+++ b/src/integrationTest/resources/internal_users.yml
@@ -0,0 +1,14 @@
+---
+_meta:
+  type: "internalusers"
+  config_version: 2
+new-user:
+  hash: "$2y$12$d2KAKcGE9qoywfu.c.hV/.pHigC7HTZFp2yJzBo8z2w.585t7XDWO"
+limited-user:
+  hash: "$2y$12$fOJAMx0U7e7M4OObVPzm6eUTnAyN/Gtpzfv34M6PL1bfusae43a52"
+  opendistro_security_roles:
+  - "user_limited-user__limited-role"
+admin:
+  hash: "$2y$12$53iW.RRy.uumsmU7lrlp7OUCPdxz40Z5uIJo1WcCC2GNFwEWNiTD6"
+  opendistro_security_roles:
+  - "user_admin__all_access"
diff --git a/src/integrationTest/resources/log4j2-test.properties b/src/integrationTest/resources/log4j2-test.properties
new file mode 100644
index 0000000000..8d9cf87666
--- /dev/null
+++ b/src/integrationTest/resources/log4j2-test.properties
@@ -0,0 +1,44 @@
+status = info
+name = Integration test logging configuration
+packages = org.opensearch.test.framework.log
+
+appender.console.type = Console
+appender.console.name = consoleAppender
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %threadName %-5p %c{1}:%L - %m%n
+appender.console.filter.prerelease.type=RegexFilter
+appender.console.filter.prerelease.regex=.+\\Qis a pre-release version of OpenSearch and is not suitable for production\\E
+appender.console.filter.prerelease.onMatch=DENY
+appender.console.filter.prerelease.onMismatch=NEUTRAL
+
+appender.capturing.type = LogCapturingAppender
+appender.capturing.name = logCapturingAppender
+
+rootLogger.level = info
+rootLogger.appenderRef.stdout.ref = consoleAppender
+
+logger.testsecconfig.name = org.opensearch.test.framework.TestSecurityConfig
+logger.testsecconfig.level = info
+logger.localopensearchcluster.name=org.opensearch.test.framework.cluster.LocalOpenSearchCluster
+logger.localopensearchcluster.level = info
+
+logger.auditlogs.name=org.opensearch.test.framework.audit
+logger.auditlogs.level = info
+
+# Logger required by test org.opensearch.security.http.JwtAuthenticationTests
+logger.httpjwtauthenticator.name = com.amazon.dlic.auth.http.jwt.HTTPJwtAuthenticator
+logger.httpjwtauthenticator.level = debug
+logger.httpjwtauthenticator.appenderRef.capturing.ref = logCapturingAppender
+
+#Required by tests:
+# org.opensearch.security.IpBruteForceAttacksPreventionTests
+# org.opensearch.security.UserBruteForceAttacksPreventionTests
+logger.backendreg.name = org.opensearch.security.auth.BackendRegistry
+logger.backendreg.level = debug
+logger.backendreg.appenderRef.capturing.ref = logCapturingAppender
+
+#com.amazon.dlic.auth.ldap
+#logger.ldap.name=com.amazon.dlic.auth.ldap.backend.LDAPAuthenticationBackend
+logger.ldap.name=com.amazon.dlic.auth.ldap.backend
+logger.ldap.level=TRACE
+logger.ldap.appenderRef.capturing.ref = logCapturingAppender
diff --git a/src/integrationTest/resources/nodes_dn.yml b/src/integrationTest/resources/nodes_dn.yml
new file mode 100644
index 0000000000..437583b160
--- /dev/null
+++ b/src/integrationTest/resources/nodes_dn.yml
@@ -0,0 +1,4 @@
+---
+_meta:
+  type: "nodesdn"
+  config_version: 2
diff --git a/src/integrationTest/resources/roles.yml b/src/integrationTest/resources/roles.yml
new file mode 100644
index 0000000000..02de9bf3d5
--- /dev/null
+++ b/src/integrationTest/resources/roles.yml
@@ -0,0 +1,19 @@
+---
+_meta:
+  type: "roles"
+  config_version: 2
+user_admin__all_access:
+  cluster_permissions:
+    - "*"
+  index_permissions:
+    - index_patterns:
+        - "*"
+      allowed_actions:
+        - "*"
+user_limited-user__limited-role:
+  index_permissions:
+    - index_patterns:
+        - "user-${user.name}"
+      allowed_actions:
+        - "indices:data/read/get"
+        - "indices:data/read/search"
diff --git a/src/integrationTest/resources/roles_mapping.yml b/src/integrationTest/resources/roles_mapping.yml
new file mode 100644
index 0000000000..193f999176
--- /dev/null
+++ b/src/integrationTest/resources/roles_mapping.yml
@@ -0,0 +1,9 @@
+---
+_meta:
+  type: "rolesmapping"
+  config_version: 2
+
+readall:
+  reserved: false
+  backend_roles:
+  - "readall"
diff --git a/src/integrationTest/resources/security_tenants.yml b/src/integrationTest/resources/security_tenants.yml
new file mode 100644
index 0000000000..93b510dd16
--- /dev/null
+++ b/src/integrationTest/resources/security_tenants.yml
@@ -0,0 +1,4 @@
+---
+_meta:
+  type: "tenants"
+  config_version: 2
diff --git a/src/integrationTest/resources/tenants.yml b/src/integrationTest/resources/tenants.yml
new file mode 100644
index 0000000000..add18ebd54
--- /dev/null
+++ b/src/integrationTest/resources/tenants.yml
@@ -0,0 +1,8 @@
+---
+_meta:
+  type: "tenants"
+  config_version: 2
+
+admin_tenant:
+  reserved: false
+  description: "Test tenant for admin user"
diff --git a/src/integrationTest/resources/whitelist.yml b/src/integrationTest/resources/whitelist.yml
new file mode 100644
index 0000000000..866ffe9eb3
--- /dev/null
+++ b/src/integrationTest/resources/whitelist.yml
@@ -0,0 +1,4 @@
+---
+_meta:
+  type: "whitelist"
+  config_version: 2