diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 8076adcf00ca9..68d02d5f7d544 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1 +1,27 @@
-* @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah
+# CODEOWNERS manages notifications, not PR approvals
+# For PR approvals see /.github/workflows/maintainer-approval.yml
+
+# Each file has a single rule applied; the last matching rule decides the owners
+# To assign ownership more specifically, add a narrower rule that also includes the existing owners
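+# Illustrative example based on the rules below: a change to a file under
+# /server/src/main/java/org/opensearch/transport/ notifies only @peternied, because that
+# later, more specific rule overrides the default '*' rule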
+
+# To verify changes to the CODEOWNERS file
+# In VSCode
+# 1. Install the extension https://marketplace.visualstudio.com/items?itemName=jasonnutter.vscode-codeowners
+# 2. Go to a file
+# 3. Use the command palette to run "CODEOWNERS: Show owners of current file", which will display all code owners for the current file.
+
+# Default ownership for all repo files
+* @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah
+
+/modules/transport-netty4/ @peternied
+
+/plugins/identity-shiro/ @peternied
+
+/server/src/main/java/org/opensearch/extensions/ @peternied
+/server/src/main/java/org/opensearch/identity/ @peternied
+/server/src/main/java/org/opensearch/threadpool/ @peternied
+/server/src/main/java/org/opensearch/transport/ @peternied
+
+/.github/ @peternied
+
+/MAINTAINERS.md @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah
diff --git a/.github/ISSUE_TEMPLATE/bug_template.md b/.github/ISSUE_TEMPLATE/bug_template.md
deleted file mode 100644
index be3ae51b237ee..0000000000000
--- a/.github/ISSUE_TEMPLATE/bug_template.md
+++ /dev/null
@@ -1,33 +0,0 @@
----
-name: 🐛 Bug report
-about: Create a report to help us improve
-title: "[BUG]"
-labels: 'bug, untriaged'
-assignees: ''
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Plugins**
-Please list all plugins currently enabled.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Host/Environment (please complete the following information):**
- - OS: [e.g. iOS]
- - Version [e.g. 22]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml
new file mode 100644
index 0000000000000..5f0798abe0f68
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_template.yml
@@ -0,0 +1,80 @@
+name: 🐛 Bug report
+description: Create a report to help us improve
+title: "[BUG]"
+labels: ['bug', 'untriaged']
+body:
+ - type: textarea
+ attributes:
+ label: Describe the bug
+ description: A clear and concise description of what the bug is.
+ validations:
+ required: true
+ - type: dropdown
+ attributes:
+ label: Related component
+ description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other".
+ multiple: false
+ options:
+ - # Empty first option to force selection
+ - Build
+ - Clients
+ - Cluster Manager
+ - Extensions
+ - Indexing:Performance
+ - Indexing:Replication
+ - Indexing
+ - Libraries
+ - Other
+ - Plugins
+ - Search:Aggregations
+ - Search:Performance
+ - Search:Query Capabilities
+ - Search:Query Insights
+ - Search:Relevance
+ - Search:Remote Search
+ - Search:Resiliency
+ - Search:Searchable Snapshots
+ - Search
+ - Storage:Durability
+ - Storage:Performance
+ - Storage:Remote
+ - Storage:Snapshots
+ - Storage
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: To Reproduce
+ description: Steps to reproduce the behavior.
+ value: |
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Expected behavior
+ description: A clear and concise description of what you expected to happen.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional Details
+ description: Add any other context about the problem here.
+ value: |
+ **Plugins**
+ Please list all plugins currently enabled.
+
+ **Screenshots**
+ If applicable, add screenshots to help explain your problem.
+
+ **Host/Environment (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Version [e.g. 22]
+
+ **Additional context**
+ Add any other context about the problem here.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 53b3614a34342..0000000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,19 +0,0 @@
----
-name: 🎆 Feature request
-about: Suggest an idea for this project
-title: ''
-labels: 'enhancement, untriaged'
-assignees: ''
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 0000000000000..0159e771f7f80
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,63 @@
+name: 🎆 Feature request
+description: Suggest an idea for this project
+title: '[Feature Request] '
+labels: ['enhancement', 'untriaged']
+body:
+ - type: textarea
+ attributes:
+ label: Is your feature request related to a problem? Please describe
+ description: A clear and concise description of what the problem is.
+ placeholder: Ex. I'm always frustrated when [...]
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe the solution you'd like
+ description: A clear and concise description of what you want to happen.
+ validations:
+ required: true
+ - type: dropdown
+ attributes:
+ label: Related component
+ description: Choose a specific OpenSearch component your feature request belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
+ multiple: false
+ options:
+ - # Empty first option to force selection
+ - Build
+ - Clients
+ - Cluster Manager
+ - Extensions
+ - Indexing:Performance
+ - Indexing:Replication
+ - Indexing
+ - Libraries
+ - Other
+ - Plugins
+ - Search:Aggregations
+ - Search:Performance
+ - Search:Query Capabilities
+ - Search:Query Insights
+ - Search:Relevance
+ - Search:Remote Search
+ - Search:Resiliency
+ - Search:Searchable Snapshots
+ - Search
+ - Storage:Durability
+ - Storage:Performance
+ - Storage:Remote
+ - Storage:Snapshots
+ - Storage
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe alternatives you've considered
+ description: A clear and concise description of any alternative solutions or features you've considered.
+ validations:
+ required: false
+ - type: textarea
+ attributes:
+ label: Additional context
+ description: Add any other context or screenshots about the feature request here.
+ validations:
+ required: false
diff --git a/.github/workflows/add-untriaged.yml b/.github/workflows/add-untriaged.yml
deleted file mode 100644
index 38de96f663051..0000000000000
--- a/.github/workflows/add-untriaged.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-name: Apply 'untriaged' label during issue lifecycle
-
-on:
- issues:
- types: [opened, reopened, transferred]
-
-jobs:
- apply-label:
- if: github.repository == 'opensearch-project/OpenSearch'
- runs-on: ubuntu-latest
- steps:
- - uses: actions/github-script@v7
- with:
- script: |
- github.rest.issues.addLabels({
- issue_number: context.issue.number,
- owner: context.repo.owner,
- repo: context.repo.repo,
- labels: ['untriaged']
- })
diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml
index d93f7e73b91e7..d6c65ddd446cd 100644
--- a/.github/workflows/check-compatibility.yml
+++ b/.github/workflows/check-compatibility.yml
@@ -36,7 +36,7 @@ jobs:
echo "### Compatible components" >> "${{ github.workspace }}/results.txt" && grep -e 'Compatible component' $HOME/gradlew-check.out | sed -e 's/Compatible component: \[\(.*\)\]/- \1/' >> "${{ github.workspace }}/results.txt"
- name: Upload results
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
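+        # Artifacts uploaded with upload-artifact v4 can only be retrieved with
+        # download-artifact v4, so the matching download step is bumped in the same change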
with:
name: results.txt
path: ${{ github.workspace }}/results.txt
@@ -48,7 +48,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download results
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
with:
name: results.txt
diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml
index df63847f8afca..b45e053cc25c2 100644
--- a/.github/workflows/create-documentation-issue.yml
+++ b/.github/workflows/create-documentation-issue.yml
@@ -29,7 +29,7 @@ jobs:
- name: Create Issue From File
id: create-issue
- uses: peter-evans/create-issue-from-file@v4
+ uses: peter-evans/create-issue-from-file@v5
with:
title: Add documentation related to new feature
content-filepath: ./ci/documentation/issue.md
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index ca026f530b4af..61962c91b4903 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@v4
- name: lychee Link Checker
id: lychee
- uses: lycheeverse/lychee-action@v1.8.0
+ uses: lycheeverse/lychee-action@v1.9.1
with:
args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
fail: true
diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml
index 2f87afd372d90..fdc2bf16937b4 100644
--- a/.github/workflows/maintainer-approval.yml
+++ b/.github/workflows/maintainer-approval.yml
@@ -2,7 +2,6 @@ name: Maintainers approval
on:
pull_request_review:
- types: [submitted]
jobs:
maintainer-approved-check:
@@ -10,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: find-maintainers
- uses: actions/github-script@v7
+ uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
result-encoding: string
@@ -26,7 +25,7 @@ jobs:
return maintainersResponse.data.map(item => item.login).join(', ');
- - uses: peternied/required-approval@v1.2
+ - uses: peternied/required-approval@v1.3
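+        # Requires at least one approving review (min-required: 1) from the maintainers
+        # resolved by the find-maintainers step above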
with:
token: ${{ secrets.GITHUB_TOKEN }}
min-required: 1
diff --git a/.github/workflows/stalled.yml b/.github/workflows/stalled.yml
index 19ec9c9438bbe..d171332b402f1 100644
--- a/.github/workflows/stalled.yml
+++ b/.github/workflows/stalled.yml
@@ -17,7 +17,7 @@ jobs:
private_key: ${{ secrets.APP_PRIVATE_KEY }}
installation_id: 22958780
- name: Stale PRs
- uses: actions/stale@v8
+ uses: actions/stale@v9
with:
repo-token: ${{ steps.github_app_token.outputs.token }}
stale-pr-label: 'stalled'
diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml
new file mode 100644
index 0000000000000..83bf4926a8c2d
--- /dev/null
+++ b/.github/workflows/triage.yml
@@ -0,0 +1,34 @@
+name: Auto triage based on the component label in issue
+
+on:
+ issues:
+ types: [opened, reopened, transferred]
+
+jobs:
+ apply-label:
+ if: github.repository == 'opensearch-project/OpenSearch'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/github-script@v7.0.1
+ with:
+ script: |
+ const { issue, repository } = context.payload;
+ const { number, body } = issue;
+ const { owner, name } = repository;
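+          // The issue forms in .github/ISSUE_TEMPLATE render the "Related component" dropdown
+          // as a "### Related component" heading in the issue body; capture the selected value
+          // so it can be applied to the issue as a label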
+ const regex = /###\sRelated\scomponent\n\n(\w.*)\n/gm;
+ let match;
+ while ( ( match = regex.exec( body ) ) ) {
+ const [ , component_label ] = match;
+ await github.rest.issues.addLabels( {
+ owner: owner.login,
+ repo: name,
+ issue_number: number,
+ labels: [ `${ component_label }` ],
+ } );
+ }
+          await github.rest.issues.addLabels({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ['untriaged']
+ })
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index a20c671c137b2..be2a89ac931e9 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -1,28 +1,32 @@
name: Increment Version
on:
+ workflow_dispatch:
+ inputs:
+ tag:
+        description: 'The release tag to increment from (e.g. 1.2.3)'
+ required: true
+ type: string
push:
tags:
- '*.*.*'
-permissions: {}
+permissions:
+ contents: write
+ issues: write
+ pull-requests: write
+
jobs:
build:
if: github.repository == 'opensearch-project/OpenSearch'
runs-on: ubuntu-latest
steps:
- - name: GitHub App token
- id: github_app_token
- uses: tibdex/github-app-token@v2.1.0
- with:
- app_id: ${{ secrets.APP_ID }}
- private_key: ${{ secrets.APP_PRIVATE_KEY }}
- installation_id: 22958780
-
- - uses: actions/checkout@v4
- - name: Fetch Tag and Version Information
+ - name: Fetch tag and version information
run: |
TAG=$(echo "${GITHUB_REF#refs/*/}")
+          if [ -n "${{ github.event.inputs.tag }}" ]; then
+            TAG="${{ github.event.inputs.tag }}"
+          fi
CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n'))
BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}")
BASE_X=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:1}.x")
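+          # Worked example with a hypothetical tag: TAG=1.2.3 gives BASE=1.2 and BASE_X=1.x;
+          # the steps that follow derive CURRENT_VERSION=1.2.3 and the patch-incremented NEXT_VERSION=1.2.4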
@@ -44,24 +48,22 @@ jobs:
echo "NEXT_VERSION=$NEXT_VERSION" >> $GITHUB_ENV
echo "NEXT_VERSION_UNDERSCORE=$NEXT_VERSION_UNDERSCORE" >> $GITHUB_ENV
echo "NEXT_VERSION_ID=$NEXT_VERSION_ID" >> $GITHUB_ENV
+
- uses: actions/checkout@v4
with:
ref: ${{ env.BASE }}
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Increment Patch Version
- run: |
- echo Incrementing $CURRENT_VERSION to $NEXT_VERSION
- echo " - \"$CURRENT_VERSION\"" >> .ci/bwcVersions
- sed -i "s/opensearch = $CURRENT_VERSION/opensearch = $NEXT_VERSION/g" buildSrc/version.properties
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
- sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Increment Patch Version on Major.Minor branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: true
- - name: Create Pull Request
+ - name: Create PR for BASE
+ id: base_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: ${{ env.BASE }}
branch: 'create-pull-request/patch-${{ env.BASE }}'
commit-message: Increment version to ${{ env.NEXT_VERSION }}
@@ -76,19 +78,18 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ env.BASE_X }}
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Add bwc version to .X branch
- run: |
- echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
- sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Add Patch Version on Major.X branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: false
- - name: Create Pull Request
+ - name: Create PR for BASE_X
+ id: base_x_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: ${{ env.BASE_X }}
branch: 'create-pull-request/patch-${{ env.BASE_X }}'
commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -103,19 +104,18 @@ jobs:
- uses: actions/checkout@v4
with:
ref: main
- token: ${{ steps.github_app_token.outputs.token }}
- - name: Add bwc version to main branch
- run: |
- echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
- sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
- echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
- sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+ - name: Add Patch Version on main branch
+ uses: peternied/opensearch-core-version-updater@v1
+ with:
+ previous-version: ${{ env.CURRENT_VERSION }}
+ new-version: ${{ env.NEXT_VERSION }}
+ update-current: false
- - name: Create Pull Request
+ - name: Create PR for main
+ id: main_pr
uses: peter-evans/create-pull-request@v5
with:
- token: ${{ steps.github_app_token.outputs.token }}
base: main
branch: 'create-pull-request/patch-main'
commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -126,3 +126,32 @@ jobs:
title: '[AUTO] [main] Add bwc version ${{ env.NEXT_VERSION }}.'
body: |
I've noticed that a new tag ${{ env.TAG }} was pushed, and added a bwc version ${{ env.NEXT_VERSION }}.
+
+ - name: Create tracking issue
+ id: create-issue
+ uses: actions/github-script@v7.0.1
+ with:
+ script: |
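+          // Opens a tracking issue that links the three version-bump PRs created by the steps above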
+ const body = `
+ ### Description
+          A new version of OpenSearch was released. To prepare for the next release, new version numbers need to be updated in all active branches of development.
+
+ ### Exit Criteria
+          Review and merge the following pull requests:
+ - [ ] ${{ steps.base_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.base_x_pr.outputs.pull-request-url }}
+ - [ ] ${{ steps.main_pr.outputs.pull-request-url }}
+
+ ### Additional Context
+          See the project-wide guidance on branching and versions [[link]](https://github.com/opensearch-project/.github/blob/main/RELEASING.md).
+ `
+          const { data: issue } = await github.rest.issues.create({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ labels: ["Build"],
+ title: "Increment version for ${{ env.NEXT_VERSION }}",
+ body: body
+ });
+          console.log(JSON.stringify(issue));
+ return issue.number;
+ result-encoding: string
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7a8511e6f0c5e..d37b02eb8de73 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,14 +9,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add getter for path field in NestedQueryBuilder ([#4636](https://github.com/opensearch-project/OpenSearch/pull/4636))
- Allow mmap to use new JDK-19 preview APIs in Apache Lucene 9.4+ ([#5151](https://github.com/opensearch-project/OpenSearch/pull/5151))
- Add events correlation engine plugin ([#6854](https://github.com/opensearch-project/OpenSearch/issues/6854))
-- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107))
- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679), [#10664](https://github.com/opensearch-project/OpenSearch/pull/10664))
- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
- [AdmissionControl] Added changes for AdmissionControl Interceptor and AdmissionControlService for RateLimiting ([#9286](https://github.com/opensearch-project/OpenSearch/pull/9286))
- GHA to verify checklist items completion in PR descriptions ([#10800](https://github.com/opensearch-project/OpenSearch/pull/10800))
- Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625))
- [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887))
-- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378))
+- [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028))
### Dependencies
- Bump `log4j-core` from 2.18.0 to 2.19.0
@@ -46,7 +45,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bump `org.bouncycastle:bcmail-jdk15on` to `org.bouncycastle:bcmail-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247))
- Bump `org.bouncycastle:bcpkix-jdk15on` to `org.bouncycastle:bcpkix-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247))
- Bump JNA version from 5.5 to 5.13 ([#9963](https://github.com/opensearch-project/OpenSearch/pull/9963))
-- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
- Bump `org.eclipse.jgit` from 6.5.0 to 6.7.0 ([#10147](https://github.com/opensearch-project/OpenSearch/pull/10147))
- Bump OpenTelemetry from 1.30.1 to 1.31.0 ([#10617](https://github.com/opensearch-project/OpenSearch/pull/10617))
- Bump OpenTelemetry from 1.31.0 to 1.32.0 and OpenTelemetry Semconv from 1.21.0-alpha to 1.23.1-alpha ([#11305](https://github.com/opensearch-project/OpenSearch/pull/11305))
@@ -59,9 +57,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Improve summary error message for invalid setting updates ([#4792](https://github.com/opensearch-project/OpenSearch/pull/4792))
- Return 409 Conflict HTTP status instead of 503 on failure to concurrently execute snapshots ([#8986](https://github.com/opensearch-project/OpenSearch/pull/5855))
- Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/))
-- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558))
- Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894))
-
+- Switched to more reliable OpenSearch Lucene snapshot location ([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728))
### Deprecated
@@ -85,8 +82,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827))
- Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944))
- Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993))
-- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873))
-- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249))
+- Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439))
+- Fix typo in API annotation check message ([#11836](https://github.com/opensearch-project/OpenSearch/pull/11836))
+- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872))
+- [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035))
### Security
@@ -105,6 +104,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255))
- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670))
+- The `remove` ingest processor now supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967), [#11983](https://github.com/opensearch-project/OpenSearch/pull/11983))
+- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275))
+- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753))
- [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853))
- Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247))
- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248))
@@ -112,62 +114,120 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679))
- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672))
+- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210))
- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024))
- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650))
- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040))
- Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307))
+- Add experimental SIMD implementation of B-tree to round down dates ([#11194](https://github.com/opensearch-project/OpenSearch/issues/11194))
- Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329))
- Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317))
- Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069))
- [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175))
+- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378))
+- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170))
+- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591))
+- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations ([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583))
+- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039))
+- Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870))
+- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11717))
+- Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891))
+- Add remove_by_pattern ingest processor ([#11920](https://github.com/opensearch-project/OpenSearch/pull/11920))
+- Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563))
+- Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([#11876](https://github.com/opensearch-project/OpenSearch/pull/11876))
+- New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465))
+- Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394))
+- Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074))
+- Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022))
+- [Metrics Framework] Adds support for Histogram metric ([#12062](https://github.com/opensearch-project/OpenSearch/pull/12062))
+- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626))
- Projected Views ([#11957](https://github.com/opensearch-project/OpenSearch/pull/11957))
### Dependencies
+- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
- Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446))
+- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796))
- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
-- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.5.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295))
+- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.8.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630), [#12167](https://github.com/opensearch-project/OpenSearch/pull/12167))
- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))
- Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508))
- Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639))
- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297))
- Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635))
-- Bump `com.squareup.okio:okio` from 3.5.0 to 3.6.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637))
-- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.0 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270))
+- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632))
+- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695))
- Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504))
- Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
-- Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
+- Bump `actions/github-script` from 6 to 7.0.1 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271), [#12166](https://github.com/opensearch-project/OpenSearch/pull/12166))
- Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294))
-- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.5 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163))
+- Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034))
+- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692))
- Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
- Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
-- Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350))
-- Bump `com.gradle.enterprise` from 3.14.1 to 3.15.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339))
+- Bump `reactor-netty-core` from 1.1.12 to 1.1.15 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350), [#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
+- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.2 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629), [#12056](https://github.com/opensearch-project/OpenSearch/pull/12056))
- Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447))
- Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450))
- Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445))
- Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448))
- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
+- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
+- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))
+- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556))
+- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557))
+- Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671))
+- Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996))
+- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559))
+- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691))
+- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693))
+- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798))
+- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887))
+- Bump `Lucene` from 9.8.0 to 9.9.2 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421), [#12063](https://github.com/opensearch-project/OpenSearch/pull/12063))
+- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963))
+- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.42.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794), [#12165](https://github.com/opensearch-project/OpenSearch/pull/12165))
+- Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960))
+- Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055))
+- Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961))
+- Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
+- Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059))
+- Bump `peter-evans/create-issue-from-file` from 4 to 5 ([#12057](https://github.com/opensearch-project/OpenSearch/pull/12057))
+- Bump `org.gradle.test-retry` from 1.5.4 to 1.5.8 ([#12168](https://github.com/opensearch-project/OpenSearch/pull/12168))
### Changed
- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))
- Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036))
-- Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562))
+- Add the means to extract the contextual properties from HttpChannel, TcpChannel and TransportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562), [#11751](https://github.com/opensearch-project/OpenSearch/pull/11751))
+- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107))
- Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598))
- [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524))
- [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642))
- Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395))
+- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558))
- Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273))
-- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895))
-- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023))
+- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895), [#11607](https://github.com/opensearch-project/OpenSearch/pull/11607))
- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057))
+- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023))
- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083))
+- Apply the fast filter optimization to composite aggregation of date histogram source ([#11505](https://github.com/opensearch-project/OpenSearch/pull/11505))
- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087))
- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528))
+- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209))
+- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
- Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308))
- Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362))
-- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
+- Allow composite aggregation to run under a parent filter aggregation ([#11499](https://github.com/opensearch-project/OpenSearch/pull/11499))
+- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512))
+- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))
+- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678))
+- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582))
+- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732))
+- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526))
+- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
+- Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877), [#12000](https://github.com/opensearch-project/OpenSearch/pull/12000))
+- Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968))
+- Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508))
+- [Query Insights] Query Insights Framework which currently supports retrieving the most time-consuming queries within the last configured time window ([#11903](https://github.com/opensearch-project/OpenSearch/pull/11903))
+- [Query Insights] Implement Top N Queries feature to collect and gather information about high latency queries in a window ([#11904](https://github.com/opensearch-project/OpenSearch/pull/11904))
### Deprecated
@@ -176,18 +236,31 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
### Fixed
- Fix failure in dissect ingest processor parsing empty brackets ([#9225](https://github.com/opensearch-project/OpenSearch/pull/9255))
-- Fix class_cast_exception when passing int to _version and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101))
+- Fix `class_cast_exception` when passing int to `_version` and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101))
- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370))
- Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496))
- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737))
-- Fix SuggestSearch.testSkipDuplicates by forceing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068))
- Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543))
- Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934))
-- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
+- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068))
+- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873))
+- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249))
+- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316))
- Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369))
- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
+- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425))
- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))
+- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485), [#11917](https://github.com/opensearch-project/OpenSearch/pull/11917))
+- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417))
+- Fix automatic addition of protocol (broken in #11512) ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
+- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711))
+- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490))
+- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815))
+- Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953))
+- Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867))
+- [BUG] Fix remote shards balancer when filtering throttled nodes ([#11724](https://github.com/opensearch-project/OpenSearch/pull/11724))
+- Add advance(int) for numeric values in order to allow point based optimization to kick in ([#12089](https://github.com/opensearch-project/OpenSearch/pull/12089))
### Security
diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index f9936aad0cf8c..f0851fc58d444 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -183,6 +183,12 @@ Run OpenSearch using `gradlew run`.
./gradlew run
```
+[Plugins](plugins/) may be installed by passing a `-PinstalledPlugins` property:
+
+```bash
+./gradlew run -PinstalledPlugins="['plugin1', 'plugin2']"
+```
+
That will build OpenSearch and start it, writing its log above Gradle's status message. We log a lot of stuff on startup, specifically these lines tell you that OpenSearch is ready.
```
@@ -342,7 +348,7 @@ Please follow these formatting guidelines:
* Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail.
* If *absolutely* necessary, you can disable formatting for regions of code with the `// tag::NAME` and `// end::NAME` directives, but note that these are intended for use in documentation, so please make it clear what you have done, and only do this where the benefit clearly outweighs the decrease in consistency.
* Note that JavaDoc and block comments i.e. `/* ... */` are not formatted, but line comments i.e `// ...` are.
-* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, if might get called out in PR reviews as something to change.
+* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, it might get called out in PR reviews as something to change.
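+  For example, prefer `if (foo == false)` over `if (!foo)`.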
## Adding Dependencies
@@ -578,7 +584,7 @@ explicitly marked by an annotation should not be extended by external implementa
any time. The `@DeprecatedApi` annotation could also be added to any classes annotated with `@PublicApi` (or documented as `@opensearch.api`) or their methods that
are either changed (with replacement) or planned to be removed across major versions.
-The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
+The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`)
annotation. The presence of this annotation signals that API may change at any time (major, minor or even patch releases). In general, the classes annotated with
`@PublicApi` may expose other classes or methods annotated with `@ExperimentalApi`, in such cases the backward compatibility guarantees would not apply to latter
(see please [Experimental Development](#experimental-development) for more details).
diff --git a/NOTICE.txt b/NOTICE.txt
index 6c7dc983f8c7a..d463b8f28561f 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -10,3 +10,6 @@ Foundation (http://www.apache.org/).
This product includes software developed by
Joda.org (http://www.joda.org/).
+
+This product includes software developed by
+Morten Haraldsen (ethlo) (https://github.com/ethlo) under the Apache License, version 2.0.
diff --git a/TRIAGING.md b/TRIAGING.md
new file mode 100644
index 0000000000000..47cb44a4f5ba2
--- /dev/null
+++ b/TRIAGING.md
@@ -0,0 +1,83 @@
+The maintainers of the OpenSearch repo seek to promote an inclusive and engaged community of contributors. In order to facilitate this, weekly triage meetings are open to all, and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. To learn more about contributing to the OpenSearch repo, visit the [Contributing](./CONTRIBUTING.md) documentation.
+
+### Do I need to attend for my issue to be addressed/triaged?
+
+Attendance is not required for your issue to be triaged or addressed. If an issue is not accepted, it will be updated with a comment on next steps. All new issues are triaged weekly.
+
+You can track if your issue was triaged by watching your GitHub notifications for updates.
+
+### What happens if my issue does not get covered this time?
+
+At each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue post itself.
+
+### How do I join the Triage meeting?
+
+Meetings are hosted every Wednesday from 10:00a to 10:55a Central Time and can be joined via [Chime](https://aws.amazon.com/chime/), using this [meeting link](https://chime.aws/1988437365).
+
+After joining the Chime meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat.
+
+If you have an issue you'd like to bring forth, please prepare a link to the issue so it can be presented and viewed by everyone in the meeting.
+
+### Is there an agenda for each week?
+
+Yes, each 55-minute meeting follows this structure:
+1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen.
+2. **Record Attendees:** The facilitator will request attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitator-tag-comments-during-the-triage-meeting) to annotate comments during the meeting.
+3. **Announcements:** Any announcements will be made at the beginning of the meeting.
+4. **Review of New Issues:** We start by reviewing all untriaged [issues](https://github.com/search?q=label%3Auntriaged+is%3Aopen++repo%3Aopensearch-project%2FOpenSearch+&type=issues&ref=advsearch&s=created&o=desc) for the OpenSearch repo.
+5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request.
+6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests.
+
+### What is the role of the facilitator?
+
+The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed.
+
+### Do I need to have already contributed to the project to attend a triage meeting?
+
+No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+
+### What if I have an issue that is almost a duplicate, should I open a new one to be triaged?
+
+You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose) including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting.
+
+### What if I have follow-up questions on an issue?
+
+If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss.
+
+### Is this meeting a good place to get help setting up features on my OpenSearch instance?
+
+While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/).
+
+There you can find answers to many common questions as well as speak with implementation experts.
+
+### What are the issue labels associated with triaging?
+
+There are several labels used to identify the 'state' of issues filed in OpenSearch:
+| Label | When Applied | Meaning |
+|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------|
+| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. |
+| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. |
+| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. |
+
+### What are the typical outcomes of a triaged issue?
+
+| Outcome | Label | Description | Canned Response |
+|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to re-open after addressing the reason." |
+| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
+| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
+
+### Is this where I should bring up potential security vulnerabilities?
+
+Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document.
+
+### How do triage facilitators tag comments during the triage meeting?
+
+During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries.
+
+This tag should not be used outside triage meetings.
diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index 6b4634c7e791c..be4579b4e5324 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -84,3 +84,45 @@ spotless {
targetExclude 'src/main/generated/**/*.java'
}
}
+
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) {
+ // Add support for incubator modules on supported Java versions.
+ run.jvmArgs += ['--add-modules=jdk.incubator.vector']
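+  // Run benchmarks against the multi-release JAR (instead of the main classes) so that
+  // JDK 20+ variants, such as the vectorized Roundable implementation, are picked up at runtime.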
+ run.classpath += files(jar.archiveFile)
+ run.classpath -= sourceSets.main.output
+ evaluationDependsOn(':libs:opensearch-common')
+
+ sourceSets {
+ java20 {
+ java {
+ srcDirs = ['src/main/java20']
+ }
+ }
+ }
+
+ configurations {
+ java20Implementation.extendsFrom(implementation)
+ }
+
+ dependencies {
+ java20Implementation sourceSets.main.output
+ java20Implementation project(':libs:opensearch-common').sourceSets.java20.output
+ java20AnnotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh"
+ }
+
+ compileJava20Java {
+ targetCompatibility = JavaVersion.VERSION_20
+ options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
+ }
+
+ jar {
+ metaInf {
+ into 'versions/20'
+ from sourceSets.java20.output
+ }
+ manifest.attributes('Multi-Release': 'true')
+ }
+
+ // classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
+ disableTasks('forbiddenApisJava20')
+}
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
new file mode 100644
index 0000000000000..4e995f5a5067c
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
@@ -0,0 +1,67 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
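+/**
+ * Benchmarks the construction time of {@link FuzzySet} instances over randomly generated
+ * document IDs, for several ID counts and false-positive probabilities (fpp).
+ */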
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterConstructionBenchmark {
+
+    private List<BytesRef> items;
+
+ @Param({ "1000000", "10000000", "50000000" })
+ private int numIds;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySetFactory fuzzySetFactory;
+ private String fieldName;
+
+ @Setup
+ public void setupIds() {
+ this.fieldName = IdFieldMapper.NAME;
+ this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, parameters));
+ }
+
+ @Benchmark
+ public FuzzySet buildFilter() throws IOException {
+ return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator());
+ }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
new file mode 100644
index 0000000000000..383539219830e
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
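+/**
+ * Benchmarks {@link FuzzySet#contains} lookups, both for keys present in the set and for
+ * random (mostly absent) keys, across several set sizes and false-positive probabilities.
+ */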
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterLookupBenchmark {
+
+ @Param({ "50000000", "1000000" })
+ private int numItems;
+
+ @Param({ "1000000" })
+ private int searchKeyCount;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySet fuzzySet;
+    private List<BytesRef> items;
+ private Random random = new Random();
+
+ @Setup
+ public void setupFilter() throws IOException {
+ String fieldName = IdFieldMapper.NAME;
+ items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator());
+ }
+
+ @Benchmark
+ public void contains_withExistingKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE);
+ }
+ }
+
+ @Benchmark
+ public void contains_withRandomKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID())));
+ }
+ }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
index 4e07af452968b..3909a3f4eb8fc 100644
--- a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
+++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java
@@ -21,7 +21,6 @@
import org.openjdk.jmh.infra.Blackhole;
import java.util.Random;
-import java.util.function.Supplier;
@Fork(value = 3)
@Warmup(iterations = 3, time = 1)
@@ -83,17 +82,17 @@ public static class Options {
"256" })
public Integer size;
- @Param({ "binary", "linear" })
+ @Param({ "binary", "linear", "btree" })
public String type;
@Param({ "uniform", "skewed_edge", "skewed_center" })
public String distribution;
public long[] queries;
-    public Supplier<Roundable> supplier;
+ public RoundableSupplier supplier;
@Setup
- public void setup() {
+ public void setup() throws ClassNotFoundException {
Random random = new Random(size);
long[] values = new long[size];
for (int i = 1; i < values.length; i++) {
@@ -128,16 +127,7 @@ public void setup() {
throw new IllegalArgumentException("invalid distribution: " + distribution);
}
- switch (type) {
- case "binary":
- supplier = () -> new BinarySearcher(values, size);
- break;
- case "linear":
- supplier = () -> new BidirectionalLinearSearcher(values, size);
- break;
- default:
- throw new IllegalArgumentException("invalid type: " + type);
- }
+ supplier = new RoundableSupplier(type, values, size);
}
private static long nextPositiveLong(Random random) {
diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java
new file mode 100644
index 0000000000000..44ac42810996f
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import java.util.function.Supplier;
+
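+/**
+ * Supplies {@link Roundable} implementations by type name. This base variant runs on JDK versions
+ * below 20 and cannot create the vectorized BtreeSearcher; the java20 source set provides a
+ * replacement via the multi-release JAR.
+ */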
+public class RoundableSupplier implements Supplier<Roundable> {
+    private final Supplier<Roundable> delegate;
+
+ RoundableSupplier(String type, long[] values, int size) throws ClassNotFoundException {
+ switch (type) {
+ case "binary":
+ delegate = () -> new BinarySearcher(values, size);
+ break;
+ case "linear":
+ delegate = () -> new BidirectionalLinearSearcher(values, size);
+ break;
+ case "btree":
+ throw new ClassNotFoundException("BtreeSearcher is not supported below JDK 20");
+ default:
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+ }
+
+ @Override
+ public Roundable get() {
+ return delegate.get();
+ }
+}
diff --git a/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java
new file mode 100644
index 0000000000000..e81c1b137bd30
--- /dev/null
+++ b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java
@@ -0,0 +1,36 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import java.util.function.Supplier;
+
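+/**
+ * JDK 20+ variant of RoundableSupplier, packaged under META-INF/versions/20 of the multi-release
+ * JAR, which can additionally supply the vectorized {@link BtreeSearcher}.
+ */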
+public class RoundableSupplier implements Supplier<Roundable> {
+    private final Supplier<Roundable> delegate;
+
+ RoundableSupplier(String type, long[] values, int size) {
+ switch (type) {
+ case "binary":
+ delegate = () -> new BinarySearcher(values, size);
+ break;
+ case "linear":
+ delegate = () -> new BidirectionalLinearSearcher(values, size);
+ break;
+ case "btree":
+ delegate = () -> new BtreeSearcher(values, size);
+ break;
+ default:
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+ }
+
+ @Override
+ public Roundable get() {
+ return delegate.get();
+ }
+}
diff --git a/build.gradle b/build.gradle
index b1cd1d532bfeb..6f9aa0ea9e439 100644
--- a/build.gradle
+++ b/build.gradle
@@ -54,8 +54,8 @@ plugins {
id 'lifecycle-base'
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
- id "com.diffplug.spotless" version "6.20.0" apply false
- id "org.gradle.test-retry" version "1.5.4" apply false
+ id "com.diffplug.spotless" version "6.25.0" apply false
+ id "org.gradle.test-retry" version "1.5.8" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
}
@@ -545,6 +545,7 @@ subprojects {
includeClasses.add("org.opensearch.snapshots.SnapshotStatusApisIT")
includeClasses.add("org.opensearch.test.rest.ClientYamlTestSuiteIT")
includeClasses.add("org.opensearch.upgrade.DetectEsInstallationTaskTests")
+ includeClasses.add("org.opensearch.cluster.MinimumClusterManagerNodesIT")
}
}
}
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index b8db8504d5b85..0562ecc6ee61b 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -103,40 +103,36 @@ dependencies {
api localGroovy()
api 'commons-codec:commons-codec:1.16.0'
- api 'org.apache.commons:commons-compress:1.24.0'
+ api 'org.apache.commons:commons-compress:1.25.0'
api 'org.apache.ant:ant:1.10.14'
api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
- api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0'
+ api 'com.netflix.nebula:nebula-publishing-plugin:21.0.0'
api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
api 'org.apache.rat:apache-rat:0.15'
api 'commons-io:commons-io:2.15.1'
- api "net.java.dev.jna:jna:5.13.0"
+ api "net.java.dev.jna:jna:5.14.0"
api 'com.github.johnrengelman:shadow:8.1.1'
api 'org.jdom:jdom2:2.0.6.1'
api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}"
api 'de.thetaphi:forbiddenapis:3.6'
- api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.5'
+ api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6'
api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}"
api 'org.apache.maven:maven-model:3.9.6'
- api 'com.networknt:json-schema-validator:1.0.86'
+ api 'com.networknt:json-schema-validator:1.2.0'
api 'org.jruby.jcodings:jcodings:1.0.58'
api 'org.jruby.joni:joni:2.2.1'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
- api "org.ajoberstar.grgit:grgit-core:5.2.0"
+ api "org.ajoberstar.grgit:grgit-core:5.2.1"
testFixturesApi "junit:junit:${props.getProperty('junit')}"
testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
testFixturesApi gradleApi()
testFixturesApi gradleTestKit()
- testImplementation 'org.wiremock:wiremock-standalone:3.1.0'
+ testImplementation 'org.wiremock:wiremock-standalone:3.3.1'
testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}"
integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') {
exclude module: "groovy"
}
- implementation('org.ajoberstar.grgit:grgit-core:5.2.0') {
- exclude group: 'org.eclipse.jgit', module: 'org.eclipse.jgit'
- }
- implementation 'org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r'
}
configurations.all {
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
index 63b88f671c84c..8ecfbf40b6c62 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
@@ -94,7 +94,7 @@ public static void configureRepositories(Project project) {
String revision = matcher.group(1);
MavenArtifactRepository luceneRepo = repos.maven(repo -> {
repo.setName("lucene-snapshots");
- repo.setUrl("https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/");
+ repo.setUrl("https://artifacts.opensearch.org/snapshots/lucene/");
});
repos.exclusiveContent(exclusiveRepo -> {
exclusiveRepo.filter(
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
index 1ad7e056b6ae6..bc44f81a81aff 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java
@@ -77,9 +77,9 @@
import java.util.stream.Stream;
public class DistroTestPlugin implements Plugin<Project> {
- private static final String SYSTEM_JDK_VERSION = "17.0.9+9";
+ private static final String SYSTEM_JDK_VERSION = "21.0.2+13";
private static final String SYSTEM_JDK_VENDOR = "adoptium";
- private static final String GRADLE_JDK_VERSION = "17.0.9+9";
+ private static final String GRADLE_JDK_VERSION = "21.0.2+13";
private static final String GRADLE_JDK_VENDOR = "adoptium";
// all distributions used by distro tests. this is temporary until tests are per distribution
diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
index f24b61ef0d165..351b42e5bc921 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
@@ -17,7 +17,7 @@ repositories {
}
dependencies {
- implementation "org.apache.logging.log4j:log4j-core:2.22.0"
+ implementation "org.apache.logging.log4j:log4j-core:2.22.1"
}
["0.0.1", "0.0.2"].forEach { v ->
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 74d655cfb1045..95ae1ddb578a1 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,14 +1,14 @@
opensearch = 3.0.0
-lucene = 9.8.0
+lucene = 9.9.2
bundled_jdk_vendor = adoptium
-bundled_jdk = 21.0.1+12
+bundled_jdk = 21.0.2+13
# optional dependencies
spatial4j = 0.7
jts = 1.15.0
-jackson = 2.16.0
-jackson_databind = 2.16.0
+jackson = 2.16.1
+jackson_databind = 2.16.1
snakeyaml = 2.1
icu4j = 70.1
supercsv = 2.4.0
@@ -26,12 +26,12 @@ jakarta_annotation = 1.3.5
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.13.0
-netty = 4.1.101.Final
+netty = 4.1.106.Final
joda = 2.12.2
# project reactor
-reactor_netty = 1.1.13
-reactor = 3.5.11
+reactor_netty = 1.1.15
+reactor = 3.5.14
# client dependencies
httpclient5 = 5.2.1
@@ -55,7 +55,7 @@ bouncycastle=1.76
randomizedrunner = 2.7.1
junit = 4.13.2
hamcrest = 2.1
-mockito = 5.5.0
+mockito = 5.10.0
objenesis = 3.2
bytebuddy = 1.14.7
@@ -70,5 +70,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.32.0
+opentelemetry = 1.34.1
opentelemetrysemconv = 1.23.1-alpha
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index ff3c322c5ccf7..f18df65dfddfa 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -34,8 +34,8 @@ apply plugin: 'opensearch.build'
apply plugin: 'opensearch.publish'
java {
- targetCompatibility = JavaVersion.VERSION_11
- sourceCompatibility = JavaVersion.VERSION_11
+ targetCompatibility = JavaVersion.VERSION_1_8
+ sourceCompatibility = JavaVersion.VERSION_1_8
}
base {
@@ -109,3 +109,10 @@ thirdPartyAudit.ignoreMissingClasses(
'javax.servlet.ServletContextEvent',
'javax.servlet.ServletContextListener'
)
+
+tasks.withType(JavaCompile) {
+ // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+ configure(options) {
+ options.compilerArgs << '-Xlint:-options'
+ }
+}
diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java
index 7691c01daefea..15905add76c4f 100644
--- a/client/rest/src/main/java/org/opensearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java
@@ -1116,9 +1116,15 @@ public long getContentLength() {
if (chunkedEnabled.get()) {
return -1L;
} else {
- long size;
+ long size = 0;
+ final byte[] buf = new byte[8192];
+ int nread = 0;
+
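+                // Count the length manually rather than with InputStream#readAllBytes (JDK 9+),
+                // keeping this client compatible with its Java 8 target.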
try (InputStream is = getContent()) {
- size = is.readAllBytes().length;
+ // read to EOF which may read more or less than buffer size
+ while ((nread = is.read(buf)) > 0) {
+ size += nread;
+ }
} catch (IOException ex) {
size = -1L;
}
diff --git a/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
index 6a4b176edd011..fdfe49ca901c9 100644
--- a/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
+++ b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
@@ -35,34 +35,34 @@ public void tearDown() {
}
public void testConsumerAllocatesBufferLimit() throws IOException {
- consumer.consume(randomByteBufferOfLength(1000).flip());
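+        // The (ByteBuffer) casts below are required on the Java 8 target, where Buffer#flip
+        // returns Buffer; the covariant ByteBuffer override was only added in JDK 9.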
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
assertThat(consumer.getBuffer().capacity(), equalTo(1000));
}
public void testConsumerAllocatesEmptyBuffer() throws IOException {
- consumer.consume(ByteBuffer.allocate(0).flip());
+ consumer.consume((ByteBuffer) ByteBuffer.allocate(0).flip());
assertThat(consumer.getBuffer().capacity(), equalTo(0));
}
public void testConsumerExpandsBufferLimits() throws IOException {
- consumer.consume(randomByteBufferOfLength(1000).flip());
- consumer.consume(randomByteBufferOfLength(2000).flip());
- consumer.consume(randomByteBufferOfLength(3000).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(2000).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(3000).flip());
assertThat(consumer.getBuffer().capacity(), equalTo(6000));
}
public void testConsumerAllocatesLimit() throws IOException {
- consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT).flip());
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
assertThat(consumer.getBuffer().capacity(), equalTo(BUFFER_LIMIT));
}
public void testConsumerFailsToAllocateOverLimit() throws IOException {
- assertThrows(ContentTooLongException.class, () -> consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT + 1).flip()));
+ assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT + 1).flip()));
}
public void testConsumerFailsToExpandOverLimit() throws IOException {
- consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT).flip());
- assertThrows(ContentTooLongException.class, () -> consumer.consume(randomByteBufferOfLength(1).flip()));
+ consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
+ assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(1).flip()));
}
private static ByteBuffer randomByteBufferOfLength(int length) {
diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
index adddb3bda725c..f609fae4e3c81 100644
--- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
+++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java
@@ -305,6 +305,7 @@ public void shutdown() {
}
}
+ @SuppressWarnings("removal")
static class SnifferThreadFactory implements ThreadFactory {
private final AtomicInteger threadNumber = new AtomicInteger(1);
private final String namePrefix;
diff --git a/client/test/build.gradle b/client/test/build.gradle
index f81a009389681..b77865df6decf 100644
--- a/client/test/build.gradle
+++ b/client/test/build.gradle
@@ -30,8 +30,8 @@
apply plugin: 'opensearch.build'
java {
- targetCompatibility = JavaVersion.VERSION_11
- sourceCompatibility = JavaVersion.VERSION_11
+ targetCompatibility = JavaVersion.VERSION_1_8
+ sourceCompatibility = JavaVersion.VERSION_1_8
}
base {
@@ -69,3 +69,10 @@ dependenciesInfo.enabled = false
//we aren't releasing this jar
thirdPartyAudit.enabled = false
test.enabled = false
+
+tasks.withType(JavaCompile) {
+ // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+ configure(options) {
+ options.compilerArgs << '-Xlint:-options'
+ }
+}
diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
index 88f667549f3e8..faef1441d0a02 100644
--- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
+++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
@@ -51,6 +51,7 @@ protected Matcher<String> nodeNameMatcher() {
return is("integTest-0");
}
+ @SuppressWarnings("removal")
@Override
protected BufferedReader openReader(Path logFile) {
assumeFalse("Skipping test because it is being run against an external cluster.",
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index cb05661dc74a4..43c38c5ad0c67 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "com.netflix.nebula.ospackage-base" version "11.5.0"
+ id "com.netflix.nebula.ospackage-base" version "11.8.0"
}
void addProcessFilesTask(String type, boolean jdk) {
diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options
index 1a0abcbaf9c88..f0ac98faffda9 100644
--- a/distribution/src/config/jvm.options
+++ b/distribution/src/config/jvm.options
@@ -38,12 +38,12 @@
8-10:-XX:+UseCMSInitiatingOccupancyOnly
## G1GC Configuration
-# NOTE: G1 GC is only supported on JDK version 10 or later
-# to use G1GC, uncomment the next two lines and update the version on the
-# following three lines to your version of the JDK
-# 10:-XX:-UseConcMarkSweepGC
-# 10:-XX:-UseCMSInitiatingOccupancyOnly
+# NOTE: G1GC is the default GC for all JDKs 11 and newer
11-:-XX:+UseG1GC
+# See https://github.com/elastic/elasticsearch/pull/46169 for the history
+# behind these settings, but the tl;dr is that default values can lead
+# to situations where heap usage grows enough to trigger a circuit breaker
+# before GC kicks in.
11-:-XX:G1ReservePercent=25
11-:-XX:InitiatingHeapOccupancyPercent=30
diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml
index b7ab2e1c2309b..ebffdde0f3699 100644
--- a/distribution/src/config/opensearch.yml
+++ b/distribution/src/config/opensearch.yml
@@ -117,12 +117,6 @@ ${path.logs}
#opensearch.experimental.feature.extensions.enabled: false
#
#
-# Gates the concurrent segment search feature. This feature enables concurrent segment search in a separate
-# index searcher threadpool.
-#
-#opensearch.experimental.feature.concurrent_segment_search.enabled: false
-#
-#
# Gates the optimization of datetime formatters caching along with change in default datetime formatter
# Once there is no observed impact on performance, this feature flag can be removed.
#
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle
index dfb4ddba24113..822b471e2e034 100644
--- a/gradle/code-coverage.gradle
+++ b/gradle/code-coverage.gradle
@@ -13,7 +13,7 @@ repositories {
gradlePluginPortal()
// TODO: Find the way to use the repositories from RepositoriesSetupPlugin
maven {
- url = "https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/"
+ url = "https://artifacts.opensearch.org/snapshots/lucene/"
}
}
diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle
index 93e1127c97a56..f3a4bf5cc765b 100644
--- a/gradle/formatting.gradle
+++ b/gradle/formatting.gradle
@@ -99,7 +99,9 @@ allprojects {
}
}
format 'misc', {
- target '*.md', '*.gradle', '**/*.yaml', '**/*.yml', '**/*.svg'
+ target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg'
+
+ targetExclude '**/simple-bulk11.json', '**/simple-msearch5.json'
trimTrailingWhitespace()
endWithNewline()
diff --git a/gradle/run.gradle b/gradle/run.gradle
index 639479e97d28f..34651f1d94964 100644
--- a/gradle/run.gradle
+++ b/gradle/run.gradle
@@ -39,6 +39,12 @@ testClusters {
testDistribution = 'archive'
if (numZones > 1) numberOfZones = numZones
if (numNodes > 1) numberOfNodes = numNodes
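+    // Optionally install plugins on the dev cluster; assumed usage:
+    //   ./gradlew run -PinstalledPlugins="['analysis-icu', 'ingest-attachment']"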
+ if (findProperty("installedPlugins")) {
+ installedPlugins = Eval.me(installedPlugins)
+ for (String p : installedPlugins) {
+ plugin('plugins:'.concat(p))
+ }
+ }
}
}
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index f1d76d80bbfa3..82a4add334a7d 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b
+distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d
diff --git a/libs/common/build.gradle b/libs/common/build.gradle
index 4f89b81636420..60bf488833393 100644
--- a/libs/common/build.gradle
+++ b/libs/common/build.gradle
@@ -43,3 +43,64 @@ tasks.named('forbiddenApisMain').configure {
// TODO: Need to decide how we want to handle for forbidden signatures with the changes to server
replaceSignatureFiles 'jdk-signatures'
}
+
+// Add support for incubator modules on supported Java versions.
+if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) {
+ sourceSets {
+ java20 {
+ java {
+ srcDirs = ['src/main/java20']
+ }
+ }
+ }
+
+ configurations {
+ java20Implementation.extendsFrom(implementation)
+ }
+
+ dependencies {
+ java20Implementation sourceSets.main.output
+ }
+
+ compileJava20Java {
+ targetCompatibility = JavaVersion.VERSION_20
+ options.compilerArgs += ['--add-modules', 'jdk.incubator.vector']
+ options.compilerArgs -= '-Werror' // use of incubator modules is reported as a warning
+ }
+
+ jar {
+ metaInf {
+ into 'versions/20'
+ from sourceSets.java20.output
+ }
+ manifest.attributes('Multi-Release': 'true')
+ }
+
+ tasks.withType(Test).configureEach {
+ // Relying on the convention for Test.classpath in custom Test tasks has been deprecated
+    // and scheduled to be removed in Gradle 9.0. The following lines are taken from the migration guide:
+ // https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#test_task_default_classpath
+ testClassesDirs = testing.suites.test.sources.output.classesDirs
+ classpath = testing.suites.test.sources.runtimeClasspath
+
+ // Adds the multi-release JAR to the classpath when executing tests.
+ // This allows newer sources to be picked up at test runtime (if supported).
+ classpath += files(jar.archiveFile)
+ // Removes the "main" sources from the classpath to avoid JarHell problems as
+ // the multi-release JAR already contains those classes.
+ classpath -= sourceSets.main.output
+ }
+
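+  // Runs the Roundable tests with the SIMD (BtreeSearcher) implementation force-enabled,
+  // regardless of the platform's preferred vector width.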
+ tasks.register('roundableSimdTest', Test) {
+ group 'verification'
+ include '**/RoundableTests.class'
+ systemProperty 'opensearch.experimental.feature.simd.rounding.enabled', 'forced'
+ }
+
+ check.dependsOn(roundableSimdTest)
+
+ forbiddenApisJava20 {
+ failOnMissingClasses = false
+ ignoreSignaturesOfMissingClasses = true
+ }
+}
diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
index 1864aec4aa951..569f48a8465f3 100644
--- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
+++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java
@@ -113,7 +113,7 @@ private void process(ExecutableElement executable, Element enclosing) {
// The executable element should not be internal (unless constructor for injectable core component)
checkNotInternal(enclosing, executable);
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -210,7 +210,7 @@ private void process(ExecutableElement executable, ReferenceType ref) {
}
}
- // Check this elements annotations
+ // Check this element's annotations
for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) {
final Element element = annotation.getAnnotationType().asElement();
if (inspectable(element)) {
@@ -316,7 +316,7 @@ private void checkPublic(@Nullable Element referencedBy, final Element element)
reportFailureAs,
"The element "
+ element
- + " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ + " is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi"
+ ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "")
);
}
diff --git a/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
new file mode 100644
index 0000000000000..626fb6e6b810e
--- /dev/null
+++ b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
@@ -0,0 +1,100 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+import jdk.incubator.vector.LongVector;
+import jdk.incubator.vector.Vector;
+import jdk.incubator.vector.VectorOperators;
+import jdk.incubator.vector.VectorSpecies;
+
+/**
+ * It uses vectorized B-tree search to find the round-down point.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+class BtreeSearcher implements Roundable {
+    private static final VectorSpecies<Long> LONG_VECTOR_SPECIES = LongVector.SPECIES_PREFERRED;
+ private static final int LANES = LONG_VECTOR_SPECIES.length();
+ private static final int SHIFT = log2(LANES);
+
+ private final long[] values;
+ private final long minValue;
+
+ BtreeSearcher(long[] values, int size) {
+ if (size <= 0) {
+ throw new IllegalArgumentException("at least one value must be present");
+ }
+
+ int blocks = (size + LANES - 1) / LANES; // number of blocks
+ int length = 1 + blocks * LANES; // size of the backing array (1-indexed)
+
+ this.minValue = values[0];
+ this.values = new long[length];
+ build(values, 0, size, this.values, 1);
+ }
+
+ /**
+ * Builds the B-tree memory layout.
+ * It builds the tree recursively, following an in-order traversal.
+ *
+ *
+ * Each block stores 'lanes' values at indices {@code i, i + 1, ..., i + lanes - 1} where {@code i} is the
+     * starting offset. The starting offset of the root block is 1. The branching factor is (1 + lanes), so each
+     * block can have that many children. Given the starting offset {@code i} of a block, the starting offset
+     * of its k-th child (k ranging from {@code 0} to {@code lanes}) can be computed as {@code i + ((i + k) << shift)}.
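+     * For example, with {@code lanes = 4} ({@code shift = 2}), the root block occupies indices 1..4 and its
+     * five children start at offsets 5, 9, 13, 17, and 21, computed as {@code 1 + ((1 + k) << 2)} for k = 0..4.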
+ *
+ * @param src is the sorted input array
+ * @param i is the index in the input array to read the value from
+ * @param size the number of values in the input array
+ * @param dst is the output array
+ * @param j is the index in the output array to write the value to
+ * @return the next index 'i'
+ */
+ private static int build(long[] src, int i, int size, long[] dst, int j) {
+ if (j < dst.length) {
+ for (int k = 0; k < LANES; k++) {
+ i = build(src, i, size, dst, j + ((j + k) << SHIFT));
+
+ // Fills the B-tree as a complete tree, i.e., all levels are completely filled,
+ // except the last level which is filled from left to right.
+ // The trick is to fill the destination array between indices 1...size (inclusive / 1-indexed)
+ // and pad the remaining array with +infinity.
+ dst[j + k] = (j + k <= size) ? src[i++] : Long.MAX_VALUE;
+ }
+ i = build(src, i, size, dst, j + ((j + LANES) << SHIFT));
+ }
+ return i;
+ }
+
+ @Override
+ public long floor(long key) {
+        Vector<Long> keyVector = LongVector.broadcast(LONG_VECTOR_SPECIES, key);
+ int i = 1, result = 1;
+
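+        // Walk the implicit B-tree: in each block, locate the first lane greater than the key,
+        // remember its predecessor as the current floor candidate, and descend into the child
+        // block that lies between the two.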
+ while (i < values.length) {
+            Vector<Long> valuesVector = LongVector.fromArray(LONG_VECTOR_SPECIES, values, i);
+ int j = i + valuesVector.compare(VectorOperators.GT, keyVector).firstTrue();
+ result = (j > i) ? j : result;
+ i += (j << SHIFT);
+ }
+
+ assert result > 1 : "key must be greater than or equal to " + minValue;
+ return values[result - 1];
+ }
+
+ private static int log2(int num) {
+ if ((num <= 0) || ((num & (num - 1)) != 0)) {
+ throw new IllegalArgumentException(num + " is not a positive power of 2");
+ }
+ return 32 - Integer.numberOfLeadingZeros(num - 1);
+ }
+}
diff --git a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java
new file mode 100644
index 0000000000000..0709ed4374227
--- /dev/null
+++ b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java
@@ -0,0 +1,75 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Factory class to create and return the fastest implementation of {@link Roundable}.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public final class RoundableFactory {
+ /**
+ * The maximum limit up to which linear search is used, otherwise binary or B-tree search is used.
+ * This is because linear search is much faster on small arrays.
+ * Benchmark results: PR #9727
+ */
+ private static final int LINEAR_SEARCH_MAX_SIZE = 64;
+
+ /**
+ * Indicates whether the vectorized (SIMD) B-tree search implementation is to be used.
+ * It is true when either:
+ * 1. The feature flag is set to "forced", or
+ * 2. The platform has a minimum of 4 long vector lanes and the feature flag is set to "true".
+ */
+ private static final boolean USE_BTREE_SEARCHER;
+
+ /**
+ * This class is initialized only when:
+ * - JDK-20+
+ * - jdk.incubator.vector.LongVector is available (--add-modules=jdk.incubator.vector is passed)
+ */
+ private static final class VectorCheck {
+ final static int SPECIES_PREFERRED = jdk.incubator.vector.LongVector.SPECIES_PREFERRED.length();
+ }
+
+ static {
+ String simdRoundingFeatureFlag = System.getProperty("opensearch.experimental.feature.simd.rounding.enabled");
+ boolean useBtreeSearcher = false;
+
+ try {
+            final Class<?> incubator = Class.forName("jdk.incubator.vector.LongVector");
+
+ useBtreeSearcher = "forced".equalsIgnoreCase(simdRoundingFeatureFlag)
+ || (VectorCheck.SPECIES_PREFERRED >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag));
+
+ } catch (final ClassNotFoundException ex) {
+ /* do not use BtreeSearcher */
+ }
+
+ USE_BTREE_SEARCHER = useBtreeSearcher;
+ }
+
+ private RoundableFactory() {}
+
+ /**
+ * Creates and returns the fastest implementation of {@link Roundable}.
+ */
+ public static Roundable create(long[] values, int size) {
+ if (size <= LINEAR_SEARCH_MAX_SIZE) {
+ return new BidirectionalLinearSearcher(values, size);
+ } else if (USE_BTREE_SEARCHER) {
+ return new BtreeSearcher(values, size);
+ } else {
+ return new BinarySearcher(values, size);
+ }
+ }
+}
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
index df04709458b29..8d8a4c7895339 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java
@@ -35,7 +35,7 @@ public void testPublicApiMethodArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)"
)
)
@@ -56,7 +56,7 @@ public void testPublicApiMethodArgumentNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)"
)
)
@@ -77,7 +77,7 @@ public void testPublicApiMethodThrowsNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)"
)
)
@@ -111,7 +111,7 @@ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)"
)
)
@@ -209,7 +209,7 @@ public void testPublicApiMethodReturnNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)"
)
)
@@ -230,7 +230,7 @@ public void testPublicApiMethodReturnNotAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)"
)
)
@@ -251,7 +251,7 @@ public void testPublicApiMethodReturnNotAnnotatedArray() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)"
)
)
@@ -272,7 +272,7 @@ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)"
)
)
@@ -297,7 +297,7 @@ public void testPublicApiMethodReturnNotAnnotatedAnnotation() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)"
)
)
@@ -388,7 +388,7 @@ public void testPublicApiMethodGenericsArgumentNotAnnotated() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)"
)
)
@@ -453,7 +453,7 @@ public void testPublicApiMethodReturnAnnotatedGenerics() {
matching(
Diagnostic.Kind.ERROR,
containsString(
- "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi "
+ "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)"
)
)
diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
index dcf8dd7945012..c8fdb3333a714 100644
--- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
+++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java
@@ -43,6 +43,7 @@ default CompilerResult compile(String name, String... names) {
return compileWithPackage(ApiAnnotationProcessorTests.class.getPackageName(), name, names);
}
+ @SuppressWarnings("removal")
default CompilerResult compileWithPackage(String pck, String name, String... names) {
final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
final DiagnosticCollector<JavaFileObject> collector = new DiagnosticCollector<>();
diff --git a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
index ae9f629c59024..ad19f456b0df4 100644
--- a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
+++ b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java
@@ -12,15 +12,31 @@
public class RoundableTests extends OpenSearchTestCase {
- public void testFloor() {
- int size = randomIntBetween(1, 256);
- long[] values = new long[size];
- for (int i = 1; i < values.length; i++) {
- values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1;
- }
+ public void testRoundingEmptyArray() {
+ Throwable throwable = assertThrows(IllegalArgumentException.class, () -> RoundableFactory.create(new long[0], 0));
+ assertEquals("at least one value must be present", throwable.getMessage());
+ }
+
+ public void testRoundingSmallArray() {
+ int size = randomIntBetween(1, 64);
+ long[] values = randomArrayOfSortedValues(size);
+ Roundable roundable = RoundableFactory.create(values, size);
+
+ assertEquals("BidirectionalLinearSearcher", roundable.getClass().getSimpleName());
+ assertRounding(roundable, values, size);
+ }
- Roundable[] impls = { new BinarySearcher(values, size), new BidirectionalLinearSearcher(values, size) };
+ public void testRoundingLargeArray() {
+ int size = randomIntBetween(65, 256);
+ long[] values = randomArrayOfSortedValues(size);
+ Roundable roundable = RoundableFactory.create(values, size);
+ boolean useBtreeSearcher = "forced".equalsIgnoreCase(System.getProperty("opensearch.experimental.feature.simd.rounding.enabled"));
+ assertEquals(useBtreeSearcher ? "BtreeSearcher" : "BinarySearcher", roundable.getClass().getSimpleName());
+ assertRounding(roundable, values, size);
+ }
+
+ private void assertRounding(Roundable roundable, long[] values, int size) {
for (int i = 0; i < 100000; i++) {
// Index of the expected round-down point.
int idx = randomIntBetween(0, size - 1);
@@ -35,23 +51,21 @@ public void testFloor() {
// round-down point, which will still floor to the same value.
long key = expected + (randomNonNegativeLong() % delta);
- for (Roundable roundable : impls) {
- assertEquals(expected, roundable.floor(key));
- }
+ assertEquals(expected, roundable.floor(key));
}
+
+ Throwable throwable = assertThrows(AssertionError.class, () -> roundable.floor(values[0] - 1));
+ assertEquals("key must be greater than or equal to " + values[0], throwable.getMessage());
}
- public void testFailureCases() {
- Throwable throwable;
+ private static long[] randomArrayOfSortedValues(int size) {
+ int capacity = size + randomInt(20); // May be slightly more than the size.
+ long[] values = new long[capacity];
- throwable = assertThrows(IllegalArgumentException.class, () -> new BinarySearcher(new long[0], 0));
- assertEquals("at least one value must be present", throwable.getMessage());
- throwable = assertThrows(IllegalArgumentException.class, () -> new BidirectionalLinearSearcher(new long[0], 0));
- assertEquals("at least one value must be present", throwable.getMessage());
+ for (int i = 1; i < size; i++) {
+ values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1;
+ }
- throwable = assertThrows(AssertionError.class, () -> new BinarySearcher(new long[] { 100 }, 1).floor(50));
- assertEquals("key must be greater than or equal to 100", throwable.getMessage());
- throwable = assertThrows(AssertionError.class, () -> new BidirectionalLinearSearcher(new long[] { 100 }, 1).floor(50));
- assertEquals("key must be greater than or equal to 100", throwable.getMessage());
+ return values;
}
}
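
The refactored tests above exercise RoundableFactory, which picks a search implementation based on the array size. A minimal usage sketch, assuming the factory and Roundable interface as exercised in the tests (the 64-element threshold is inferred from the test bounds):

    import org.opensearch.common.round.Roundable;
    import org.opensearch.common.round.RoundableFactory;

    public class RoundableExample {
        public static void main(String[] args) {
            // Sorted round-down points, e.g. bucket boundaries of a date histogram.
            long[] values = { 0, 100, 200, 300 };

            // Arrays of up to 64 elements get BidirectionalLinearSearcher; larger ones
            // get BinarySearcher (or BtreeSearcher when SIMD rounding is forced).
            Roundable roundable = RoundableFactory.create(values, values.length);

            // floor(key) returns the greatest value less than or equal to key.
            System.out.println(roundable.floor(250)); // prints 200
        }
    }
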
diff --git a/libs/core/build.gradle b/libs/core/build.gradle
index 4850b5aea5c85..0cf2cd0bf92b6 100644
--- a/libs/core/build.gradle
+++ b/libs/core/build.gradle
@@ -36,45 +36,6 @@ base {
archivesName = 'opensearch-core'
}
-// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs
-if (!isEclipse) {
- sourceSets {
- java11 {
- java {
- srcDirs = ['src/main/java11']
- }
- }
- }
-
- configurations {
- java11Compile.extendsFrom(compile)
- }
-
- dependencies {
- java11Implementation sourceSets.main.output
- }
-
- compileJava11Java {
- sourceCompatibility = JavaVersion.VERSION_11
- targetCompatibility = JavaVersion.VERSION_11
- }
-
- forbiddenApisJava11 {
- if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) {
- targetCompatibility = JavaVersion.VERSION_11
- }
- replaceSignatureFiles 'jdk-signatures'
- }
-
- jar {
- metaInf {
- into 'versions/11'
- from sourceSets.java11.output
- }
- manifest.attributes('Multi-Release': 'true')
- }
-}
-
dependencies {
api project(':libs:opensearch-common')
diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/core/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.1.jar.sha1 b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1
deleted file mode 100644
index f9a3e2f3cbee6..0000000000000
--- a/libs/core/licenses/lucene-core-9.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e8421c5f8573bcf22e9265fc7e19469545a775a
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.9.2.jar.sha1 b/libs/core/licenses/lucene-core-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..2d03d69369b9f
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.9.2.jar.sha1
@@ -0,0 +1 @@
+7699f80220fc80b08413902560904623b88beb9f
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index d94be3f25b53d..307da89c18d48 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -98,8 +98,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
- public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0);
- public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0);
+ public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2);
+ public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_2);
public static final Version CURRENT = V_3_0_0;
public static Version fromId(int id) {
diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
index c0abad7ed727f..1e48cf1f476da 100644
--- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
+++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java
@@ -32,6 +32,7 @@
package org.opensearch.core.index.shard;
+import org.apache.lucene.util.RamUsageEstimator;
import org.opensearch.common.annotation.PublicApi;
import org.opensearch.core.common.Strings;
import org.opensearch.core.common.io.stream.StreamInput;
@@ -55,6 +56,8 @@ public class ShardId implements Comparable<ShardId>, ToXContentFragment, Writeab
private final int shardId;
private final int hashCode;
+ private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShardId.class);
+
/**
* Constructs a new shard id.
* @param index the index name
@@ -88,6 +91,10 @@ public ShardId(StreamInput in) throws IOException {
hashCode = computeHashCode();
}
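+ /**
+ * Returns the estimated shallow heap size of a {@link ShardId} instance, in bytes.
+ */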
+ public long getBaseRamBytesUsed() {
+ return BASE_RAM_BYTES_USED;
+ }
+
/**
* Writes this shard id to a stream.
* @param out the stream to write to
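
The new constant relies on Lucene's RamUsageEstimator, which computes the shallow size of a class (object header plus field slots, excluding referenced objects) once and shares it across all instances. A minimal sketch of the same pattern, with illustrative fields:

    import org.apache.lucene.util.RamUsageEstimator;

    public class ShallowSizeExample {
        // Computed once per class, like ShardId.BASE_RAM_BYTES_USED above.
        private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShallowSizeExample.class);

        private final int id = 0;         // illustrative fields; only their slots are counted,
        private final long cachedHash = 0L; // not any objects they might reference

        public static void main(String[] args) {
            System.out.println(BASE_RAM_BYTES_USED);
        }
    }
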
diff --git a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
index 898ce7e4e913b..ab48cc2357e7f 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java
@@ -129,6 +129,7 @@ private void configureSocket(ServerSocket socket) throws IOException {
socket.setReuseAddress(config.tcpReuseAddress());
}
+ @SuppressWarnings("removal")
protected static SocketChannel accept(ServerSocketChannel serverSocketChannel) throws IOException {
try {
assert serverSocketChannel.isBlocking() == false;
diff --git a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
index 3df8e42fe4f14..530aa1d86afc7 100644
--- a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
+++ b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java
@@ -388,6 +388,7 @@ private void configureSocket(Socket socket, boolean isConnectComplete) throws IO
}
}
+ @SuppressWarnings("removal")
private static void connect(SocketChannel socketChannel, InetSocketAddress remoteAddress) throws IOException {
try {
AccessController.doPrivileged((PrivilegedExceptionAction<Boolean>) () -> socketChannel.connect(remoteAddress));
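
The @SuppressWarnings("removal") annotations added throughout these files silence the deprecated-for-removal warnings that JDK 17+ emits for SecurityManager and AccessController usage. A minimal sketch of the suppressed pattern (the method here is illustrative, not from the codebase):

    import java.security.AccessController;
    import java.security.PrivilegedAction;

    public class PrivilegedExample {
        @SuppressWarnings("removal") // AccessController is deprecated for removal since JDK 17
        static String readProperty(String name) {
            return AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty(name));
        }

        public static void main(String[] args) {
            System.out.println(readProperty("java.version"));
        }
    }
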
diff --git a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
index 4a200a5dfa9bd..969fa91b50538 100644
--- a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
+++ b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java
@@ -65,6 +65,7 @@ protected Class<?> findClass(String name) throws ClassNotFoundException {
/**
* Return a new classloader across the parent and extended loaders.
*/
+ @SuppressWarnings("removal")
public static ExtendedPluginsClassLoader create(ClassLoader parent, List<ClassLoader> extendedLoaders) {
return AccessController.doPrivileged(
(PrivilegedAction<ExtendedPluginsClassLoader>) () -> new ExtendedPluginsClassLoader(parent, extendedLoaders)
diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
index f41c49844997d..a2531f4a9156e 100644
--- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
+++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java
@@ -76,6 +76,7 @@
* @see <a href="http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html">
*      http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html</a>
*/
+@SuppressWarnings("removal")
public class SecureSM extends SecurityManager {
private final String[] classesThatCanExit;
diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
index fe239fea8129e..3c8e78a902fcb 100644
--- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
+++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java
@@ -18,6 +18,7 @@
import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory;
import java.util.concurrent.ForkJoinWorkerThread;
+@SuppressWarnings("removal")
public class SecuredForkJoinWorkerThreadFactory implements ForkJoinWorkerThreadFactory {
static AccessControlContext contextWithPermissions(Permission... perms) {
Permissions permissions = new Permissions();
diff --git a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
index 026ffb080ee61..fd666c70cfebb 100644
--- a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
+++ b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java
@@ -41,6 +41,7 @@
import junit.framework.TestCase;
/** Simple tests for SecureSM */
+@SuppressWarnings("removal")
public class SecureSMTests extends TestCase {
static {
// install a mock security policy:
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
index d57def9406b17..f38fdd6412d79 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -34,6 +34,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return metricsTelemetry.createUpDownCounter(name, description, unit);
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return metricsTelemetry.createHistogram(name, description, unit);
+ }
+
@Override
public void close() throws IOException {
metricsTelemetry.close();
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
new file mode 100644
index 0000000000000..95ada626e21ee
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Histogram records the value for an existing metric.
+ * {@opensearch.experimental}
+ */
+@ExperimentalApi
+public interface Histogram {
+
+ /**
+ * Records a value.
+ * @param value value to be added.
+ */
+ void record(double value);
+
+ /**
+ * Records a value along with the attributes.
+ *
+ * @param value value to be added.
+ * @param tags attributes/dimensions of the metric.
+ */
+ void record(double value, Tags tags);
+
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
index 61b3df089928b..94d19bda31f34 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -36,4 +36,15 @@ public interface MetricsRegistry extends Closeable {
* @return counter.
*/
Counter createUpDownCounter(String name, String description, String unit);
+
+ /**
+ * Creates the histogram type of metric. The implementation framework will take care
+ * of the bucketing strategy.
+ *
+ * @param name name of the histogram.
+ * @param description description of the metric.
+ * @param unit unit of the metric.
+ * @return histogram.
+ */
+ Histogram createHistogram(String name, String description, String unit);
}
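
A sketch of how calling code might use the new histogram API; the registry wiring and the Tags.create() fluent builder are assumed from the existing telemetry conventions, and all metric names are illustrative:

    import org.opensearch.telemetry.metrics.Histogram;
    import org.opensearch.telemetry.metrics.MetricsRegistry;
    import org.opensearch.telemetry.metrics.tags.Tags;

    public class LatencyRecorder {
        private final Histogram latency;

        public LatencyRecorder(MetricsRegistry registry) {
            // The implementation framework chooses the bucketing strategy.
            this.latency = registry.createHistogram("request.latency", "Request latency", "ms");
        }

        public void onRequest(long tookMillis, String index) {
            latency.record(tookMillis);                                       // plain value
            latency.record(tookMillis, Tags.create().addTag("index", index)); // with dimensions
        }
    }
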
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
new file mode 100644
index 0000000000000..20e72bccad899
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Histogram;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Histogram}
+ * {@opensearch.internal}
+ */
+@InternalApi
+public class NoopHistogram implements Histogram {
+
+ /**
+ * No-op Histogram instance
+ */
+ public static final NoopHistogram INSTANCE = new NoopHistogram();
+
+ private NoopHistogram() {}
+
+ @Override
+ public void record(double value) {
+
+ }
+
+ @Override
+ public void record(double value, Tags tags) {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 640c6842a8960..d3dda68cfae71 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -10,6 +10,7 @@
import org.opensearch.common.annotation.InternalApi;
import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
import java.io.IOException;
@@ -38,6 +39,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return NoopCounter.INSTANCE;
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return NoopHistogram.INSTANCE;
+ }
+
@Override
public void close() throws IOException {
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
index decbf49f795c4..93600da510977 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
@@ -21,6 +21,7 @@
class DefaultSpanScope implements SpanScope {
private final Span span;
private final SpanScope previousSpanScope;
+ private final Span beforeSpan;
private static final ThreadLocal<SpanScope> spanScopeThreadLocal = new ThreadLocal<>();
private final TracerContextStorage<String, Span> tracerContextStorage;
@@ -29,8 +30,14 @@ class DefaultSpanScope implements SpanScope {
* @param span span
* @param previousSpanScope before attached span scope.
*/
- private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextStorage tracerContextStorage) {
+ private DefaultSpanScope(
+ Span span,
+ final Span beforeSpan,
+ SpanScope previousSpanScope,
TracerContextStorage<String, Span> tracerContextStorage
+ ) {
this.span = Objects.requireNonNull(span);
+ this.beforeSpan = beforeSpan;
this.previousSpanScope = previousSpanScope;
this.tracerContextStorage = tracerContextStorage;
}
@@ -43,7 +50,8 @@ private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextSt
*/
public static SpanScope create(Span span, TracerContextStorage<String, Span> tracerContextStorage) {
final SpanScope beforeSpanScope = spanScopeThreadLocal.get();
- SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpanScope, tracerContextStorage);
+ final Span beforeSpan = tracerContextStorage.get(TracerContextStorage.CURRENT_SPAN);
+ SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpan, beforeSpanScope, tracerContextStorage);
return newSpanScope;
}
@@ -61,8 +69,8 @@ public SpanScope attach() {
private void detach() {
spanScopeThreadLocal.set(previousSpanScope);
- if (previousSpanScope != null) {
- tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, previousSpanScope.getSpan());
+ if (beforeSpan != null) {
+ tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, beforeSpan);
} else {
tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, null);
}
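
The fix above snapshots the span that was current in TracerContextStorage when the scope was created (beforeSpan) and restores exactly that span on detach, instead of deriving it from the previous SpanScope, whose span may belong to a different thread's chain. A self-contained toy model of the restore semantics, not the actual tracer API:

    import java.util.concurrent.atomic.AtomicReference;

    public class ScopeRestoreExample {
        // Stands in for TracerContextStorage.CURRENT_SPAN.
        static final AtomicReference<String> CURRENT_SPAN = new AtomicReference<>();

        // Mirrors the fixed DefaultSpanScope: snapshot on attach, restore the snapshot on detach.
        static AutoCloseable attach(String span) {
            final String beforeSpan = CURRENT_SPAN.get(); // captured at creation, like the new field
            CURRENT_SPAN.set(span);
            return () -> CURRENT_SPAN.set(beforeSpan);
        }

        public static void main(String[] args) throws Exception {
            try (AutoCloseable outer = attach("spanA")) {
                try (AutoCloseable inner = attach("spanB")) {
                    System.out.println(CURRENT_SPAN.get()); // spanB
                }
                System.out.println(CURRENT_SPAN.get()); // spanA, the captured snapshot
            }
            System.out.println(CURRENT_SPAN.get()); // null
        }
    }
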
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 6171641db5f07..02f126075845b 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -48,4 +48,15 @@ public void testUpDownCounter() {
assertSame(mockCounter, counter);
}
+ public void testHistogram() {
+ Histogram mockHistogram = mock(Histogram.class);
+ when(defaultMeterRegistry.createHistogram(any(String.class), any(String.class), any(String.class))).thenReturn(mockHistogram);
+ Histogram histogram = defaultMeterRegistry.createHistogram(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testHistogram",
+ "test up-down counter",
+ "ms"
+ );
+ assertSame(mockHistogram, histogram);
+ }
+
}
diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
deleted file mode 100644
index 8da478fc6013d..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-35e8b7bf4fc1d078766bb155103d433ed5bb1627
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..b4b781f604910
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
@@ -0,0 +1 @@
+1be7098dccc079171464dca7e386bd8df623b031
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
deleted file mode 100644
index 3e952ffe92418..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3c422d7f3901c9a1becf9df3cf41efc68a5ab95c
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..ad91e748ebe94
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
@@ -0,0 +1 @@
+c4ddbc5277670f2e56b1f5e44e83afa748bcb125
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
deleted file mode 100644
index d62b5874ab023..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2033e2c5f531785d17f3a2bc31842e3bbb7983b2
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..9b30e7bf921b2
--- /dev/null
+++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
@@ -0,0 +1 @@
+8e4f1923d73cd55f2b4c0d56ee4ed80419297354
\ No newline at end of file
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
new file mode 100644
index 0000000000000..4c05f0058f2ed
--- /dev/null
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.xcontent;
+
+import com.fasterxml.jackson.core.StreamReadConstraints;
+
+import org.opensearch.common.annotation.InternalApi;
+
+/**
+ * Consolidates the XContent constraints (primarily reflecting Jackson's {@link StreamReadConstraints})
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+public interface XContentContraints {
+ final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max";
+ final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max";
+ final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max";
+
+ final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(System.getProperty(DEFAULT_MAX_STRING_LEN_PROPERTY, "50000000" /* ~50 Mb */));
+
+ final int DEFAULT_MAX_NAME_LEN = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_NAME_LEN_PROPERTY, "50000" /* StreamReadConstraints.DEFAULT_MAX_NAME_LEN */)
+ );
+
+ final int DEFAULT_MAX_DEPTH = Integer.parseInt(
+ System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */)
+ );
+}
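
These defaults are read once from system properties at class load, so deployments can tune the limits with JVM flags before any XContent implementation initializes. A small sketch of checking the effective values (property names come from the interface above; the flag values in the comment are illustrative):

    // e.g. started with: -Dopensearch.xcontent.depth.max=500 -Dopensearch.xcontent.name.length.max=100000
    public class XContentLimitsCheck {
        public static void main(String[] args) {
            System.out.println(Integer.parseInt(System.getProperty("opensearch.xcontent.string.length.max", "50000000")));
            System.out.println(Integer.parseInt(System.getProperty("opensearch.xcontent.name.length.max", "50000")));
            System.out.println(Integer.parseInt(System.getProperty("opensearch.xcontent.depth.max", "1000")));
        }
    }
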
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
index 81f8fe9b6366f..7e92f236213d4 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java
@@ -37,8 +37,10 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A CBOR based content implementation using Jackson.
*/
-public class CborXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class CborXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(cborXContent);
}
@@ -76,7 +74,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method
cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- cborFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ cborFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ cborFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
cborFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
cborXContent = new CborXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
index 4bd7c4c99bb46..91f6bbeb4f786 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java
@@ -38,7 +38,9 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -57,11 +59,7 @@
/**
* A JSON based content implementation using Jackson.
*/
-public class JsonXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class JsonXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(jsonXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- jsonFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ jsonFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ jsonFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
jsonFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
jsonXContent = new JsonXContent();
}
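
Each content type applies the same wiring; for reference, here is the equivalent constraint setup on a plain Jackson JsonFactory in isolation (Jackson 2.16 APIs; a minimal sketch, not the OpenSearch factory configuration):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.StreamReadConstraints;
    import com.fasterxml.jackson.core.StreamWriteConstraints;

    public class ConstrainedFactory {
        public static JsonFactory create() {
            JsonFactory factory = new JsonFactory();
            // The writer-side nesting limit mirrors the reader-side one.
            factory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(1000).build());
            factory.setStreamReadConstraints(
                StreamReadConstraints.builder()
                    .maxStringLength(50_000_000)
                    .maxNameLength(50_000)
                    .maxNestingDepth(1000)
                    .build()
            );
            return factory;
        }
    }
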
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
index e824d4e1ae991..c73e126102a80 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java
@@ -37,9 +37,11 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -58,11 +60,7 @@
/**
* A Smile based content implementation using Jackson.
*/
-public class SmileXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class SmileXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(smileXContent);
}
@@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException {
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- smileFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ smileFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ smileFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
smileFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
smileXContent = new SmileXContent();
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
index 0ad3c44e0168a..3f6a4b3aeead7 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
@@ -36,8 +36,10 @@
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.core.StreamReadFeature;
+import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.core.xcontent.DeprecationHandler;
import org.opensearch.core.xcontent.MediaType;
@@ -56,11 +58,7 @@
/**
* A YAML based content implementation using Jackson.
*/
-public class YamlXContent implements XContent {
- public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(
- System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */)
- );
-
+public class YamlXContent implements XContent, XContentContraints {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(yamlXContent);
}
@@ -71,7 +69,14 @@ public static XContentBuilder contentBuilder() throws IOException {
static {
yamlFactory = new YAMLFactory();
yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
- yamlFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build());
+ yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
+ yamlFactory.setStreamReadConstraints(
+ StreamReadConstraints.builder()
+ .maxStringLength(DEFAULT_MAX_STRING_LEN)
+ .maxNameLength(DEFAULT_MAX_NAME_LEN)
+ .maxNestingDepth(DEFAULT_MAX_DEPTH)
+ .build()
+ );
yamlFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true);
yamlXContent = new YamlXContent();
}
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index d3d9ea174cf1b..0e431d8ea4277 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -40,6 +40,7 @@
import org.opensearch.common.xcontent.cbor.CborXContent;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.common.xcontent.smile.SmileXContent;
+import org.opensearch.common.xcontent.yaml.YamlXContent;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.core.xcontent.XContentParseException;
@@ -48,16 +49,20 @@
import org.opensearch.test.OpenSearchTestCase;
import java.io.IOException;
+import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
+import java.util.zip.GZIPInputStream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
+import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -67,6 +72,7 @@
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assume.assumeThat;
import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage;
public class XContentParserTests extends OpenSearchTestCase {
@@ -94,6 +100,50 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomRealisticUnicodeOfCodepointLength(3145730)
);
+ private static final Map<XContentType, Supplier<String>> FIELD_NAME_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLengthBetween(1, JsonXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.CBOR,
+ () -> randomAlphaOfLengthBetween(1, CborXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.SMILE,
+ () -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_NAME_LEN),
+ XContentType.YAML,
+ () -> randomAlphaOfLengthBetween(1, YamlXContent.DEFAULT_MAX_NAME_LEN)
+ );
+
+ private static final Map<XContentType, Supplier<String>> FIELD_NAME_OFF_LIMIT_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomAlphaOfLength(JsonXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.CBOR,
+ () -> randomAlphaOfLength(CborXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.SMILE,
+ () -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_NAME_LEN + 1),
+ XContentType.YAML,
+ () -> randomAlphaOfLength(YamlXContent.DEFAULT_MAX_NAME_LEN + 1)
+ );
+
+ private static final Map<XContentType, Supplier<Integer>> DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> randomIntBetween(1, JsonXContent.DEFAULT_MAX_DEPTH),
+ XContentType.CBOR,
+ () -> randomIntBetween(1, CborXContent.DEFAULT_MAX_DEPTH),
+ XContentType.SMILE,
+ () -> randomIntBetween(1, SmileXContent.DEFAULT_MAX_DEPTH),
+ XContentType.YAML,
+ () -> randomIntBetween(1, YamlXContent.DEFAULT_MAX_DEPTH)
+ );
+
+ private static final Map<XContentType, Supplier<Integer>> OFF_LIMIT_DEPTH_GENERATORS = Map.of(
+ XContentType.JSON,
+ () -> JsonXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.CBOR,
+ () -> CborXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.SMILE,
+ () -> SmileXContent.DEFAULT_MAX_DEPTH + 1,
+ XContentType.YAML,
+ () -> YamlXContent.DEFAULT_MAX_DEPTH + 1
+ );
+
public void testStringOffLimit() throws IOException {
final XContentType xContentType = randomFrom(XContentType.values());
@@ -155,6 +205,188 @@ public void testString() throws IOException {
}
}
+ public void testFieldNameOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_OFF_LIMIT_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ // See https://github.com/FasterXML/jackson-dataformats-binary/issues/392; support
+ // for CBOR and Smile is coming
+ if (xContentType != XContentType.JSON) {
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ } else {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testFieldName() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = FIELD_NAME_GENERATORS.get(xContentType).get();
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ // Branching the YAML logic off into the separate test case testWriteDepthOffLimitYaml since it behaves differently
+ assumeThat(xContentType, not(XContentType.YAML));
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is subtle: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, which is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+ }
+ }
+
+ public void testWriteDepthOffLimitYaml() throws IOException {
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ try (XContentBuilder builder = XContentBuilder.builder(XContentType.YAML.xContent())) {
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(XContentType.YAML).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ // The behavior here is subtle: the generator does write the new object tag (changing the internal state)
+ // BUT throws the exception after the fact, which is why we have to close the object at the end.
+ assertThrows(StreamConstraintsException.class, () -> builder.startObject());
+ } catch (final IllegalStateException ex) {
+ // The YAML generator has a really hard time recovering from StreamConstraintsException: the internal
+ // state ends up inconsistent, and closing cleanly is not feasible.
+ }
+ }
+
+ public void testReadDepthOffLimit() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ // Since the parser and generator use the same max depth constraints, we cannot generate off-limit
+ // content here, so we use pre-created test files instead.
+ try (
+ InputStream in = new GZIPInputStream(
+ getDataInputStream("depth-off-limit." + xContentType.name().toLowerCase(Locale.US) + ".gz")
+ )
+ ) {
+ try (XContentParser parser = createParser(xContentType.xContent(), in)) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ }
+
+ if (xContentType != XContentType.YAML) {
+ assertThrows(StreamConstraintsException.class, () -> parser.nextToken());
+ }
+ }
+ }
+ }
+
+ public void testDepth() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+
+ final String field = randomAlphaOfLengthBetween(1, 5);
+ final String value = randomAlphaOfLengthBetween(1, 5);
+
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ final int maxDepth = DEPTH_GENERATORS.get(xContentType).get() - 1;
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.startObject();
+ builder.field(field + depth);
+ }
+
+ builder.startObject();
+ if (randomBoolean()) {
+ builder.field(field, value);
+ } else {
+ builder.field(field).value(value);
+ }
+ builder.endObject();
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ builder.endObject();
+ }
+
+ try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) {
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field + depth, parser.currentName());
+ }
+
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals(field, parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+
+ for (int depth = 0; depth < maxDepth; ++depth) {
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ }
+
+ assertNull(parser.nextToken());
+ }
+ }
+ }
+
public void testFloat() throws IOException {
final XContentType xContentType = randomFrom(XContentType.values());
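
The depth-off-limit fixtures below are pre-generated because, as noted in testReadDepthOffLimit, a writer with the same constraints cannot produce them. A sketch of how the JSON fixture could plausibly be regenerated with plain Jackson and the write-side limit lifted (file name, field names, and depth are illustrative; the other formats would use their respective factories):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.StreamWriteConstraints;

    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.zip.GZIPOutputStream;

    public class DepthFixtureGenerator {
        public static void main(String[] args) throws Exception {
            JsonFactory factory = new JsonFactory();
            // Lift the writer limit so nesting can exceed the reader's default of 1000.
            factory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(Integer.MAX_VALUE).build());

            try (
                OutputStream out = new GZIPOutputStream(Files.newOutputStream(Path.of("depth-off-limit.json.gz")));
                JsonGenerator generator = factory.createGenerator(out)
            ) {
                for (int depth = 0; depth <= 1000; ++depth) {
                    generator.writeStartObject();
                    generator.writeFieldName("field" + depth);
                }
                generator.writeString("value");
                for (int depth = 0; depth <= 1000; ++depth) {
                    generator.writeEndObject();
                }
            }
        }
    }
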
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz
new file mode 100644
index 0000000000000..88de7e590e7f0
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz
new file mode 100644
index 0000000000000..76274910542ac
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz
new file mode 100644
index 0000000000000..e248778b37253
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz differ
diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz
new file mode 100644
index 0000000000000..3b36594482a68
Binary files /dev/null and b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz differ
diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 71af708f2e1dc..648536f9136a8 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -36,10 +36,9 @@
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
import org.opensearch.index.query.Operator;
import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
import java.util.Arrays;
import java.util.Collection;
@@ -49,10 +48,10 @@
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
-public class QueryStringWithAnalyzersIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryStringWithAnalyzersIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
- public QueryStringWithAnalyzersIT(Settings dynamicSettings) {
- super(dynamicSettings);
+ public QueryStringWithAnalyzersIT(Settings staticSettings) {
+ super(staticSettings);
}
@ParametersFactory
@@ -63,11 +62,6 @@ public static Collection