diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 3790be5f279d1..d44586ef4901a 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.DocReader; @@ -76,8 +77,7 @@ public class ScriptScoreBenchmark { private final PluginsService pluginsService = new PluginsService( Settings.EMPTY, null, - null, - Path.of(System.getProperty("plugins.dir")) + new PluginsLoader(null, Path.of(System.getProperty("plugins.dir"))) ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); diff --git a/branches.json b/branches.json index e81d511a88458..0e23a795664dd 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "8.17" + }, { "branch": "8.x" }, diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 1ed4e9d780576..9237c3ae8918c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -144,7 +144,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { doLast { ['main', 'test'].each { sourceSet -> - modifyXml(".idea/modules/libs/native/elasticsearch.libs.${project.project(':libs:native').name}.${sourceSet}.iml") { xml -> + modifyXml(".idea/modules/libs/native/elasticsearch.libs.native.${sourceSet}.iml") { xml -> xml.component.find { it.'@name' == 'NewModuleRootManager' }?.'@LANGUAGE_LEVEL' = 'JDK_21_PREVIEW' } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy index ad37fa9f02c8c..6c87149095186 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy @@ -15,16 +15,12 @@ import org.elasticsearch.gradle.internal.test.AntFixture import org.gradle.api.file.FileSystemOperations import org.gradle.api.file.ProjectLayout import org.gradle.api.provider.ProviderFactory -import org.gradle.api.tasks.Internal import org.gradle.process.ExecOperations import javax.inject.Inject abstract class AntFixtureStop extends LoggedExec implements FixtureStop { - @Internal - AntFixture fixture - @Inject AntFixtureStop(ProjectLayout projectLayout, ExecOperations execOperations, @@ -34,12 +30,12 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } void setFixture(AntFixture fixture) { - assert this.fixture == null - this.fixture = fixture; - final Object pid = "${-> this.fixture.pid}" - onlyIf("pidFile exists") { fixture.pidFile.exists() } + def pidFile = fixture.pidFile + def fixtureName = fixture.name + final Object pid = "${-> 
Integer.parseInt(pidFile.getText('UTF-8').trim())}" + onlyIf("pidFile exists") { pidFile.exists() } doFirst { - logger.info("Shutting down ${fixture.name} with pid ${pid}") + logger.info("Shutting down ${fixtureName} with pid ${pid}") } if (OS.current() == OS.WINDOWS) { @@ -51,9 +47,8 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } doLast { fileSystemOperations.delete { - it.delete(fixture.pidFile) + it.delete(pidFile) } } - this.fixture = fixture } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy index 81f21f8c62d86..01a3bdaee2337 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy @@ -29,11 +29,6 @@ import java.nio.charset.Charset */ public abstract class AntTask extends DefaultTask { - /** - * A buffer that will contain the output of the ant code run, - * if the output was not already written directly to stdout. - */ - public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() @Inject protected FileSystemOperations getFileSystemOperations() { @@ -57,6 +52,11 @@ public abstract class AntTask extends DefaultTask { // otherwise groovy replaces System.out, and you have no chance to debug // ant.saveStreams = false + /** + * A buffer that will contain the output of the ant code run, + * if the output was not already written directly to stdout. + */ + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy index f2837ff40fb79..88a68f1194858 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy @@ -10,22 +10,37 @@ package org.elasticsearch.gradle.internal.test import org.elasticsearch.gradle.OS + import org.elasticsearch.gradle.internal.AntFixtureStop import org.elasticsearch.gradle.internal.AntTask +import org.elasticsearch.gradle.testclusters.TestClusterInfo +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry import org.gradle.api.GradleException +import org.gradle.api.file.ProjectLayout +import org.gradle.api.provider.Property +import org.gradle.api.provider.Provider +import org.gradle.api.provider.ProviderFactory +import org.gradle.api.provider.ValueSource +import org.gradle.api.provider.ValueSourceParameters +import org.gradle.api.tasks.Input import org.gradle.api.tasks.Internal import org.gradle.api.tasks.TaskProvider +import javax.inject.Inject + /** * A fixture for integration tests which runs in a separate process launched by Ant. */ -class AntFixture extends AntTask implements Fixture { +class AntFixture extends AntTask { /** The path to the executable that starts the fixture. 
*/ @Internal String executable private final List arguments = new ArrayList<>() + private ProjectLayout projectLayout + private final ProviderFactory providerFactory void args(Object... args) { arguments.addAll(args) @@ -69,19 +84,14 @@ class AntFixture extends AntTask implements Fixture { return tmpFile.exists() } - private final TaskProvider stopTask - - AntFixture() { - stopTask = createStopTask() + @Inject + AntFixture(ProjectLayout projectLayout, ProviderFactory providerFactory) { + this.providerFactory = providerFactory + this.projectLayout = projectLayout; + TaskProvider stopTask = createStopTask() finalizedBy(stopTask) } - @Override - @Internal - TaskProvider getStopTask() { - return stopTask - } - @Override protected void runAnt(AntBuilder ant) { // reset everything @@ -231,7 +241,7 @@ class AntFixture extends AntTask implements Fixture { */ @Internal protected File getBaseDir() { - return new File(project.buildDir, "fixtures/${name}") + return new File(projectLayout.getBuildDirectory().getAsFile().get(), "fixtures/${name}") } /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */ @@ -242,7 +252,7 @@ class AntFixture extends AntTask implements Fixture { /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */ @Internal - protected File getPidFile() { + File getPidFile() { return new File(baseDir, 'pid') } @@ -264,6 +274,12 @@ class AntFixture extends AntTask implements Fixture { return portsFile.readLines("UTF-8").get(0) } + @Internal + Provider getAddressAndPortProvider() { + File thePortFile = portsFile + return providerFactory.provider(() -> thePortFile.readLines("UTF-8").get(0)) + } + /** Returns a file that wraps around the actual command when {@code spawn == true}. */ @Internal protected File getWrapperScript() { @@ -281,4 +297,22 @@ class AntFixture extends AntTask implements Fixture { protected File getRunLog() { return new File(cwd, 'run.log') } + + @Internal + Provider getAddressAndPortSource() { + return providerFactory.of(AntFixtureValueSource.class, spec -> { + spec.getParameters().getPortFile().set(portsFile); + }); + } + + static abstract class AntFixtureValueSource implements ValueSource { + @Override + String obtain() { + return getParameters().getPortFile().map { it.readLines("UTF-8").get(0) }.get() + } + + interface Parameters extends ValueSourceParameters { + Property getPortFile(); + } + } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index 61dea47eb15c1..ca669276123b3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.Directory; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RelativePath; import org.gradle.api.internal.file.FileOperations; @@ -244,10 +245,11 @@ public void apply(Project project) { yamlRestCompatTestTask.configure(testTask -> { testTask.systemProperty("tests.restCompat", true); // Use test runner and classpath from "normal" 
yaml source set + FileCollection outputFileCollection = yamlCompatTestSourceSet.getOutput(); testTask.setTestClassesDirs( yamlTestSourceSet.getOutput().getClassesDirs().plus(yamlCompatTestSourceSet.getOutput().getClassesDirs()) ); - testTask.onlyIf("Compatibility tests are available", t -> yamlCompatTestSourceSet.getOutput().isEmpty() == false); + testTask.onlyIf("Compatibility tests are available", t -> outputFileCollection.isEmpty() == false); testTask.setClasspath( yamlCompatTestSourceSet.getRuntimeClasspath() // remove the "normal" api and tests diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java index ef93dafa913cd..ba242a8e23861 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/RestCompatTestTransformTask.java @@ -137,7 +137,7 @@ public void skipTest(String fullTestName, String reason) { // However, the folder can be arbitrarily nest so, a == a1/a2/a3, and the test name can include forward slashes, so c == c1/c2/c3 // So we also need to support a1/a2/a3/b/c1/c2/c3 - String[] testParts = fullTestName.split("/"); + String[] testParts = fullTestName.split("/", 3); if (testParts.length < 3) { throw new IllegalArgumentException( "To skip tests, all 3 parts [folder/file/test name] must be defined. found [" + fullTestName + "]" diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index c5b7a44a19d31..9c7d20d84a670 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -17,232 +17,201 @@ import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo class BwcVersionsSpec extends Specification { List versionLines = [] - def "current version is next major with last minor staged"() { - given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.16.2', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.0.0') - - when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) - def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } - - then: - unreleased == [ - (v('7.16.2')): new UnreleasedVersionInfo(v('7.16.2'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') - ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.minimumWireCompatibleVersion == v('7.17.0') - } - def 
"current version is next minor with next major and last minor both staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.17', ':distribution:bwc:staged'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.x', ':distribution:bwc:minor'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0'), v('9.1.0')] } def "current is next minor with upcoming minor staged"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:staged'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), 
'8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), '9.0', ':distribution:bwc:staged'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.1.0')] } def "current version is staged major"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:bugfix'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:bugfix'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0')] } def "current version is major with unreleased next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), - (v('7.17.0')): new 
UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.16.1')): new UnreleasedVersionInfo(v('8.16.1'), '8.16', ':distribution:bwc:bugfix'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('9.0.0')] } def "current version is major with staged next minor"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('8.0.0', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('9.0.0', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.0')) + def bwc = new BwcVersions(versionLines, v('9.0.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.15.2')): new UnreleasedVersionInfo(v('7.15.2'), '7.15', ':distribution:bwc:bugfix'), - (v('7.16.0')): new UnreleasedVersionInfo(v('7.16.0'), '7.16', ':distribution:bwc:staged'), - (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + (v('8.15.2')): new UnreleasedVersionInfo(v('8.15.2'), '8.15', ':distribution:bwc:bugfix'), + (v('8.16.0')): new UnreleasedVersionInfo(v('8.16.0'), '8.16', ':distribution:bwc:staged'), + (v('8.17.0')): new UnreleasedVersionInfo(v('8.17.0'), '8.x', ':distribution:bwc:minor'), + (v('9.0.0')): new UnreleasedVersionInfo(v('9.0.0'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.17.0'), v('8.0.0')] + bwc.wireCompatible == [v('8.17.0'), v('9.0.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.17.0'), v('9.0.0')] } def "current version is next bugfix"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + 
addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') when: - def bwc = new BwcVersions(versionLines, v('8.0.1')) + def bwc = new BwcVersions(versionLines, v('9.0.1')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), 'main', ':distribution'), + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), 'main', ':distribution'), ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1')] } def "current version is next minor with no staged releases"() { given: - addVersion('7.14.0', '8.9.0') - addVersion('7.14.1', '8.9.0') - addVersion('7.14.2', '8.9.0') - addVersion('7.15.0', '8.9.0') - addVersion('7.15.1', '8.9.0') - addVersion('7.15.2', '8.9.0') - addVersion('7.16.0', '8.10.0') - addVersion('7.16.1', '8.10.0') - addVersion('7.17.0', '8.10.0') - addVersion('7.17.1', '8.10.0') - addVersion('8.0.0', '9.0.0') - addVersion('8.0.1', '9.0.0') - addVersion('8.1.0', '9.1.0') + addVersion('8.14.0', '9.9.0') + addVersion('8.14.1', '9.9.0') + addVersion('8.14.2', '9.9.0') + addVersion('8.15.0', '9.9.0') + addVersion('8.15.1', '9.9.0') + addVersion('8.15.2', '9.9.0') + addVersion('8.16.0', '9.10.0') + addVersion('8.16.1', '9.10.0') + addVersion('8.17.0', '9.10.0') + addVersion('8.17.1', '9.10.0') + addVersion('9.0.0', '10.0.0') + addVersion('9.0.1', '10.0.0') + addVersion('9.1.0', '10.1.0') when: - def bwc = new BwcVersions(versionLines, v('8.1.0')) + def bwc = new BwcVersions(versionLines, v('9.1.0')) def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } then: unreleased == [ - (v('7.17.1')): new UnreleasedVersionInfo(v('7.17.1'), '7.17', ':distribution:bwc:maintenance'), - (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), '8.0', ':distribution:bwc:bugfix'), - (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') + (v('8.17.1')): new UnreleasedVersionInfo(v('8.17.1'), '8.17', ':distribution:bwc:maintenance'), + (v('9.0.1')): new UnreleasedVersionInfo(v('9.0.1'), '9.0', ':distribution:bwc:bugfix'), + (v('9.1.0')): new UnreleasedVersionInfo(v('9.1.0'), 'main', ':distribution') ] - bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] - bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] + bwc.wireCompatible == [v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] + bwc.indexCompatible == [v('8.14.0'), v('8.14.1'), v('8.14.2'), v('8.15.0'), v('8.15.1'), v('8.15.2'), v('8.16.0'), v('8.16.1'), v('8.17.0'), v('8.17.1'), v('9.0.0'), v('9.0.1'), v('9.1.0')] } private void addVersion(String elasticsearch, String lucene) { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java 
b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index ec341ecfd8b79..77393fe16b4c2 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -76,6 +76,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final LinkedHashMap<String, Predicate<TestClusterConfiguration>> waitConditions = new LinkedHashMap<>(); private final transient Project project; private final Provider<ReaperService> reaper; + private final Provider<TestClustersRegistry> testClustersRegistryProvider; private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -87,11 +88,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private boolean shared = false; + private int claims = 0; + public ElasticsearchCluster( String path, String clusterName, Project project, Provider<ReaperService> reaper, + Provider<TestClustersRegistry> testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -104,6 +108,7 @@ public ElasticsearchCluster( this.clusterName = clusterName; this.project = project; this.reaper = reaper; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -120,6 +125,7 @@ public ElasticsearchCluster( clusterName + "-0", project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -177,6 +183,7 @@ public void setNumberOfNodes(int numberOfNodes) { clusterName + "-" + i, project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -408,6 +415,7 @@ public void setPreserveDataDir(boolean preserveDataDir) { public void freeze() { nodes.forEach(ElasticsearchNode::freeze); configurationFrozen.set(true); + nodes.whenObjectAdded(node -> { throw new IllegalStateException("Cannot add nodes to a test cluster after it has been frozen"); }); } private void checkFrozen() { @@ -663,4 +671,11 @@ public String toString() { return "cluster{" + path + ":" + clusterName + "}"; } + int addClaim() { + return ++this.claims; + } + + int removeClaim() { + return --this.claims; + } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index df11733928f0f..90162591cfcef 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -124,6 +124,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final String name; transient private final Project project; private final Provider<ReaperService> reaperServiceProvider; + private final Provider<TestClustersRegistry> testClustersRegistryProvider; + private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -164,7 +166,6 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final List<ElasticsearchDistribution> distributions = new ArrayList<>(); private int currentDistro = 0; private TestDistribution testDistribution; - private volatile Process esProcess; private Function<String, String> nameCustomization = s -> s; 
private boolean isWorkingDirConfigured = false; private String httpPort = "0"; @@ -179,6 +180,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { String name, Project project, Provider reaperServiceProvider, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -191,6 +193,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.name = name; this.project = project; this.reaperServiceProvider = reaperServiceProvider; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -892,11 +895,13 @@ private void startElasticsearchProcess() { } } LOGGER.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment); + Process esProcess; try { esProcess = processBuilder.start(); } catch (IOException e) { throw new TestClustersException("Failed to start ES process for " + this, e); } + testClustersRegistryProvider.get().storeProcess(id(), esProcess); reaperServiceProvider.get().registerPid(toString(), esProcess.pid()); } @@ -982,6 +987,7 @@ public synchronized void stop(boolean tailLogs) { } catch (IOException e) { throw new UncheckedIOException(e); } + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); if (esProcess == null && tailLogs) { // This is a special case. If start() throws an exception the plugin will still call stop // Another exception here would eat the orriginal. @@ -1574,6 +1580,7 @@ public List getFeatureFlags() { @Override @Internal public boolean isProcessAlive() { + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?"); return esProcess.isAlive(); } @@ -1602,6 +1609,10 @@ public int hashCode() { @Override public String toString() { + return id() + " (" + System.identityHashCode(this) + ")"; + } + + private String id() { return "node{" + path + ":" + name + "}"; } @@ -1702,7 +1713,7 @@ public CharSequence[] getArgs() { } } - private record FeatureFlag(String feature, Version from, Version until) { + public record FeatureFlag(String feature, Version from, Version until) { @Input public String getFeature() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java new file mode 100644 index 0000000000000..07663de7a9df9 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.gradle.testclusters; + +import java.io.File; +import java.util.List; + +public class TestClusterInfo { + private final List allHttpSocketURI; + private final List allTransportPortURI; + private final List auditLogs; + + public TestClusterInfo(List allHttpSocketURI, List allTransportPortURI, List auditLogs) { + this.allHttpSocketURI = allHttpSocketURI; + this.allTransportPortURI = allTransportPortURI; + this.auditLogs = auditLogs; + } + + public List getAllHttpSocketURI() { + return allHttpSocketURI; + } + + public List getAllTransportPortURI() { + return allTransportPortURI; + } + + public List getAuditLogs() { + return auditLogs; + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java new file mode 100644 index 0000000000000..8ecadcdc6d2b1 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.jetbrains.annotations.Nullable; + +public abstract class TestClusterValueSource implements ValueSource { + + @Nullable + @Override + public TestClusterInfo obtain() { + String clusterName = getParameters().getClusterName().get(); + String path = getParameters().getPath().get(); + return getParameters().getService().get().getClusterDetails(path, clusterName); + } + + interface Parameters extends ValueSourceParameters { + Property getClusterName(); + + Property getPath(); + + Property getService(); + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java index f84aa2a0389c2..9e5fc1f09ac9e 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.gradle.testclusters; +import org.elasticsearch.gradle.ElasticsearchDistribution; import org.gradle.api.Task; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -34,10 +35,15 @@ default void useCluster(ElasticsearchCluster cluster) { if (cluster.getPath().equals(getProject().getPath()) == false) { throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster); } - - cluster.getNodes() - .all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze())))); - dependsOn(cluster.getPluginAndModuleConfigurations()); + if (cluster.getName().equals(getName())) { + for (ElasticsearchNode node : cluster.getNodes()) { + for (ElasticsearchDistribution distro : node.getDistributions()) { + 
ElasticsearchDistribution frozenDistro = distro.maybeFreeze(); + dependsOn(frozenDistro); + } + } + dependsOn(cluster.getPluginAndModuleConfigurations()); + } getClusters().add(cluster); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 301782d52d1a3..ada31bc11a653 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.invocation.Gradle; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; @@ -106,15 +107,22 @@ public void apply(Project project) { runtimeJavaProvider = providerFactory.provider( () -> System.getenv("RUNTIME_JAVA_HOME") == null ? Jvm.current().getJavaHome() : new File(System.getenv("RUNTIME_JAVA_HOME")) ); + + // register cluster registry as a global build service + Provider testClustersRegistryProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); + // enable the DSL to describe clusters - NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaperServiceProvider); + NamedDomainObjectContainer container = createTestClustersContainerExtension( + project, + testClustersRegistryProvider, + reaperServiceProvider + ); // provide a task to be able to list defined clusters. createListClustersTask(project, container); - // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); - // register throttle so we only run at most max-workers/2 nodes concurrently Provider testClustersThrottleProvider = project.getGradle() .getSharedServices() @@ -145,6 +153,7 @@ private void configureArtifactTransforms(Project project) { private NamedDomainObjectContainer createTestClustersContainerExtension( Project project, + Provider testClustersRegistryProvider, Provider reaper ) { // Create an extensions that allows describing clusters @@ -155,6 +164,7 @@ private NamedDomainObjectContainer createTestClustersConta name, project, reaper, + testClustersRegistryProvider, getFileSystemOperations(), getArchiveOperations(), getExecOperations(), @@ -199,7 +209,9 @@ public void apply(Project project) { Provider testClusterTasksService = project.getGradle() .getSharedServices() - .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> {}); + .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> { + spec.getParameters().getRegistry().set(registryProvider); + }); TestClustersRegistry registry = registryProvider.get(); // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters @@ -209,7 +221,7 @@ public void apply(Project project) { configureClaimClustersHook(project.getGradle(), registry); // Before each task, we determine if a cluster needs to be started for that task. 
- configureStartClustersHook(project.getGradle(), registry, testClusterTasksService); + configureStartClustersHook(project.getGradle()); // After each task we determine if there are clusters that are no longer needed. getEventsListenerRegistry().onTaskCompletion(testClusterTasksService); @@ -228,12 +240,7 @@ private static void configureClaimClustersHook(Gradle gradle, TestClustersRegist }); } - private void configureStartClustersHook( - Gradle gradle, - TestClustersRegistry registry, - Provider<TaskEventsService> testClusterTasksService - ) { - testClusterTasksService.get().registry(registry); + private void configureStartClustersHook(Gradle gradle) { gradle.getTaskGraph().whenReady(taskExecutionGraph -> { taskExecutionGraph.getAllTasks() .stream() @@ -249,19 +256,14 @@ private void configureStartClustersHook( } } - static public abstract class TaskEventsService implements BuildService<BuildServiceParameters.None>, OperationCompletionListener { + static public abstract class TaskEventsService implements BuildService<Params>, OperationCompletionListener { Map<String, TestClustersAware> tasksMap = new HashMap<>(); - private TestClustersRegistry registryProvider; public void register(TestClustersAware task) { tasksMap.put(task.getPath(), task); } - public void registry(TestClustersRegistry registry) { - this.registryProvider = registry; - } - @Override public void onFinish(FinishEvent finishEvent) { if (finishEvent instanceof TaskFinishEvent taskFinishEvent) { @@ -273,11 +275,18 @@ public void onFinish(FinishEvent finishEvent) { if (task.getDidWork()) { task.getClusters() .forEach( - cluster -> registryProvider.stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) + cluster -> getParameters().getRegistry() + .get() + .stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) ); } } } } + + // Parameters giving the task events service access to the shared test clusters registry + interface Params extends BuildServiceParameters { + Property<TestClustersRegistry> getRegistry(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java index 8de0dd67b654c..8d2a9217e7d0c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java @@ -10,6 +10,8 @@ import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; @@ -17,20 +19,23 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; public abstract class TestClustersRegistry implements BuildService<BuildServiceParameters.None> { private static final Logger logger = Logging.getLogger(TestClustersRegistry.class); private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); - private final Map<ElasticsearchCluster, Integer> claimsInventory = new HashMap<>(); - private final Set<ElasticsearchCluster> runningClusters = new HashSet<>(); + private final Map<String, Process> nodeProcesses = new HashMap<>(); + + @Inject + public abstract ProviderFactory getProviderFactory(); public void claimCluster(ElasticsearchCluster cluster) { - cluster.freeze(); - int claim = claimsInventory.getOrDefault(cluster, 0) + 1; - claimsInventory.put(cluster, claim); - 
if (claim > 1) { + int claims = cluster.addClaim(); + if (claims > 1) { cluster.setShared(true); } } @@ -43,6 +48,13 @@ public void maybeStartCluster(ElasticsearchCluster cluster) { cluster.start(); } + public Provider getClusterInfo(String clusterName) { + return getProviderFactory().of(TestClusterValueSource.class, spec -> { + spec.getParameters().getService().set(TestClustersRegistry.this); + spec.getParameters().getClusterName().set(clusterName); + }); + } + public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { if (taskFailed) { // If the task fails, and other tasks use this cluster, the other task will likely never be @@ -67,8 +79,7 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { runningClusters.remove(cluster); } } else { - int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1; - claimsInventory.put(cluster, currentClaims); + int currentClaims = cluster.removeClaim(); if (currentClaims <= 0 && runningClusters.contains(cluster)) { cluster.stop(false); runningClusters.remove(cluster); @@ -76,4 +87,33 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { } } + public TestClusterInfo getClusterDetails(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + return new TestClusterInfo( + cluster.getAllHttpSocketURI(), + cluster.getAllTransportPortURI(), + cluster.getNodes().stream().map(n -> n.getAuditLog()).collect(Collectors.toList()) + ); + } + + public void restart(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + cluster.restart(); + } + + public void storeProcess(String id, Process esProcess) { + nodeProcesses.put(id, esProcess); + } + + public Process getProcess(String id) { + return nodeProcesses.get(id); + } } diff --git a/docs/changelog/115091.yaml b/docs/changelog/115091.yaml new file mode 100644 index 0000000000000..762bcca5e8c52 --- /dev/null +++ b/docs/changelog/115091.yaml @@ -0,0 +1,7 @@ +pr: 115091 +summary: Added stricter range type checks and runtime warnings for ENRICH +area: ES|QL +type: bug +issues: + - 107357 + - 116799 diff --git a/docs/changelog/116358.yaml b/docs/changelog/116358.yaml new file mode 100644 index 0000000000000..58b44a1e9bcf5 --- /dev/null +++ b/docs/changelog/116358.yaml @@ -0,0 +1,5 @@ +pr: 116358 +summary: Update Deberta tokenizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/116515.yaml b/docs/changelog/116515.yaml new file mode 100644 index 0000000000000..6c0d473361e52 --- /dev/null +++ b/docs/changelog/116515.yaml @@ -0,0 +1,5 @@ +pr: 116515 +summary: Esql/lookup join grammar +area: ES|QL +type: feature +issues: [] diff --git a/docs/changelog/116689.yaml b/docs/changelog/116689.yaml new file mode 100644 index 0000000000000..0b1d1646868aa --- /dev/null +++ b/docs/changelog/116689.yaml @@ -0,0 +1,10 @@ +pr: 116689 +summary: Deprecate `_source.mode` in mappings +area: Mapping +type: deprecation +issues: [] +deprecation: + title: Deprecate `_source.mode` in mappings + area: Mapping + details: Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. 
+ impact: Use `index.mapping.source.mode` index setting instead diff --git a/docs/changelog/116765.yaml b/docs/changelog/116765.yaml new file mode 100644 index 0000000000000..ec2357c17acaf --- /dev/null +++ b/docs/changelog/116765.yaml @@ -0,0 +1,5 @@ +pr: 116765 +summary: Metrics for incremental bulk splits +area: Distributed +type: enhancement +issues: [] diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml new file mode 100644 index 0000000000000..afe06c583fe55 --- /dev/null +++ b/docs/changelog/116819.yaml @@ -0,0 +1,5 @@ +pr: 116819 +summary: ESQL - Add match operator (:) +area: Search +type: feature +issues: [] diff --git a/docs/changelog/116944.yaml b/docs/changelog/116944.yaml new file mode 100644 index 0000000000000..e7833e49cf965 --- /dev/null +++ b/docs/changelog/116944.yaml @@ -0,0 +1,11 @@ +pr: 116944 +summary: "Remove support for type, fields, `copy_to` and boost in metadata field definition" +area: Mapping +type: breaking +issues: [] +breaking: + title: "Remove support for type, fields, copy_to and boost in metadata field definition" + area: Mapping + details: The type, fields, copy_to and boost parameters are no longer supported in metadata field definition + impact: Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings. + notable: false diff --git a/docs/changelog/116957.yaml b/docs/changelog/116957.yaml new file mode 100644 index 0000000000000..1020190de180d --- /dev/null +++ b/docs/changelog/116957.yaml @@ -0,0 +1,5 @@ +pr: 116957 +summary: Propagate scoring function through random sampler +area: Machine Learning +type: bug +issues: [ 110134 ] diff --git a/docs/changelog/116962.yaml b/docs/changelog/116962.yaml new file mode 100644 index 0000000000000..8f16b00e3f9fc --- /dev/null +++ b/docs/changelog/116962.yaml @@ -0,0 +1,5 @@ +pr: 116962 +summary: "Add special case for elastic reranker in inference API" +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116970.yaml b/docs/changelog/116970.yaml new file mode 100644 index 0000000000000..66de673dfb53c --- /dev/null +++ b/docs/changelog/116970.yaml @@ -0,0 +1,11 @@ +pr: 116970 +summary: Remove legacy params from range query +area: Search +type: breaking +issues: [] +breaking: + title: Remove legacy params from range query + area: REST API + details: The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + impact: Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. 
+ notable: false diff --git a/docs/changelog/116980.yaml b/docs/changelog/116980.yaml new file mode 100644 index 0000000000000..140324fd40b92 --- /dev/null +++ b/docs/changelog/116980.yaml @@ -0,0 +1,6 @@ +pr: 116980 +summary: "ESQL: Fix sorts containing `_source`" +area: ES|QL +type: bug +issues: + - 116659 diff --git a/docs/changelog/116995.yaml b/docs/changelog/116995.yaml new file mode 100644 index 0000000000000..a0467c630edf3 --- /dev/null +++ b/docs/changelog/116995.yaml @@ -0,0 +1,5 @@ +pr: 116995 +summary: "Apm-data: disable date_detection for all apm data streams" +area: Data streams +type: enhancement +issues: [] \ No newline at end of file diff --git a/docs/changelog/117080.yaml b/docs/changelog/117080.yaml new file mode 100644 index 0000000000000..5909f966e0fa2 --- /dev/null +++ b/docs/changelog/117080.yaml @@ -0,0 +1,5 @@ +pr: 117080 +summary: Esql Enable Date Nanos (tech preview) +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/117105.yaml b/docs/changelog/117105.yaml new file mode 100644 index 0000000000000..de56c4d521a62 --- /dev/null +++ b/docs/changelog/117105.yaml @@ -0,0 +1,6 @@ +pr: 117105 +summary: Fix long metric deserialize & add - auto-resize needs to be set manually +area: CCS +type: bug +issues: + - 116914 diff --git a/docs/changelog/117153.yaml b/docs/changelog/117153.yaml new file mode 100644 index 0000000000000..f7640c0a7ed6a --- /dev/null +++ b/docs/changelog/117153.yaml @@ -0,0 +1,5 @@ +pr: 117153 +summary: "ESQL: fix the column position in errors" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/117182.yaml b/docs/changelog/117182.yaml new file mode 100644 index 0000000000000..b5398bec1ef30 --- /dev/null +++ b/docs/changelog/117182.yaml @@ -0,0 +1,6 @@ +pr: 117182 +summary: Change synthetic source logic for `constant_keyword` +area: Mapping +type: bug +issues: + - 117083 diff --git a/docs/changelog/90529.yaml b/docs/changelog/90529.yaml new file mode 100644 index 0000000000000..a014c82259a9e --- /dev/null +++ b/docs/changelog/90529.yaml @@ -0,0 +1,26 @@ +pr: 90529 +summary: Output a consistent format when generating error json +area: Infra/REST API +type: "breaking" +issues: + - 89387 +breaking: + title: Error JSON structure has changed when detailed errors are disabled + area: REST API + details: |- + This change modifies the JSON format of error messages returned to REST clients + when detailed messages are turned off. + Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set, + just consisted of a single `"error"` text field with some basic information. + Setting `http.detailed_errors.enabled: true` (the default) changed this field + to an object with more detailed information. + With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always + be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed + errors are enabled. + To use the previous structure for non-detailed errors, use the v8 REST API. + impact: |- + If you have set `http.detailed_errors.enabled: false` (the default is `true`) + the structure of JSON when any exceptions occur now matches the structure when + detailed errors are enabled. + To use the previous structure for non-detailed errors, use the v8 REST API. 
+ notable: false diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index bbbea192f0f86..e640fa77c71ee 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -159,6 +159,8 @@ node. <5> The decider which led to the `no` decision for the node. <6> An explanation as to why the decider returned a `no` decision, with a helpful hint pointing to the setting that led to the decision. In this example, a newly created index has <> that requires that it only be allocated to a node named `nonexistent_node`, which does not exist, so the index is unable to allocate. +See https://www.youtube.com/watch?v=5z3n2VgusLE[this video] for a walkthrough of troubleshooting a node and index setting mismatch. + [[maximum-number-of-retries-exceeded]] ====== Maximum number of retries exceeded @@ -235,7 +237,9 @@ primary shard that was previously allocated. ---- // NOTCONSOLE -TIP: If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>. +If a shard is unassigned with an allocation status of `no_valid_shard_copy`, then you should <>. If all the nodes containing in-sync copies of a shard are lost, then you can <>. + +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. ===== Unassigned replica shard diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 235113ac1394a..33e748d7eb7c1 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -38,12 +38,12 @@ image::images/esql/processing-command.svg[A processing command changing an input * <> * <> ifeval::["{release-state}"=="unreleased"] -* experimental:[] <> +//* experimental:[] <> endif::[] * <> * <> ifeval::["{release-state}"=="unreleased"] -* experimental:[] <> +//* experimental:[] <> endif::[] * experimental:[] <> * <> @@ -63,12 +63,12 @@ include::processing-commands/enrich.asciidoc[] include::processing-commands/eval.asciidoc[] include::processing-commands/grok.asciidoc[] ifeval::["{release-state}"=="unreleased"] -include::processing-commands/inlinestats.asciidoc[] +//include::processing-commands/inlinestats.asciidoc[] endif::[] include::processing-commands/keep.asciidoc[] include::processing-commands/limit.asciidoc[] ifeval::["{release-state}"=="unreleased"] -include::processing-commands/lookup.asciidoc[] +//include::processing-commands/lookup.asciidoc[] endif::[] include::processing-commands/mv_expand.asciidoc[] include::processing-commands/rename.asciidoc[] diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc index c48118d1c367a..ad34e29f1a55b 100644 --- a/docs/reference/esql/esql-enrich-data.asciidoc +++ b/docs/reference/esql/esql-enrich-data.asciidoc @@ -138,8 +138,33 @@ include::{es-ref-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=upda include::../ingest/enrich.asciidoc[tag=update-enrich-policy] -==== Limitations +==== Enrich Policy Types and Limitations +The {esql} `ENRICH` command supports all three enrich policy types: + +`geo_match`:: +Matches enrich data to incoming documents based on a <>. +For an example, see <>. + +`match`:: +Matches enrich data to incoming documents based on a <>. +For an example, see <>. 
+
+`range`::
+Matches a number, date, or IP address in incoming documents to a range in the
+enrich index based on a <>. For an example,
+see <>.
+
// tag::limitations[]
-The {esql} `ENRICH` command only supports enrich policies of type `match`.
-Furthermore, `ENRICH` only supports enriching on a column of type `keyword`.
+While all three enrich policy types are supported, there are some limitations to be aware of:
+
+* The `geo_match` enrich policy type only supports the `intersects` spatial relation.
+* The `match_field` in the `ENRICH` command must be of the correct type.
+For example, if the enrich policy is of type `geo_match`, the `match_field` in the `ENRICH`
+command must be of type `geo_point` or `geo_shape`.
+Likewise, a `range` enrich policy requires a `match_field` of type `integer`, `long`, `date`, or `ip`,
+depending on the type of the range field in the original enrich index.
+* This constraint is relaxed, however, for `range` policies when the `match_field` is of type `KEYWORD`.
+In this case the field values are parsed during query execution, row by row.
+If any value fails to parse, the output values for that row are set to `null`,
+a warning is produced, and the query continues to execute.
// end::limitations[]
diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index a7c0e5e01a867..151ca803bf2eb 100644
--- a/docs/reference/esql/esql-language.asciidoc
+++ b/docs/reference/esql/esql-language.asciidoc
@@ -14,6 +14,7 @@ Detailed reference documentation for the {esql} language: * <> * <> * <>
+* <>
include::esql-syntax.asciidoc[] include::esql-commands.asciidoc[]
@@ -23,3 +24,4 @@ include::multivalued-fields.asciidoc[] include::esql-process-data-with-dissect-grok.asciidoc[] include::esql-enrich-data.asciidoc[] include::implicit-casting.asciidoc[]
+include::time-spans.asciidoc[]
diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index 1772e956bd9e2..c2849e4889f98 100644
--- a/docs/reference/esql/esql-limitations.asciidoc
+++ b/docs/reference/esql/esql-limitations.asciidoc
@@ -25,6 +25,9 @@ include::processing-commands/limit.asciidoc[tag=limitation] * `alias` * `boolean` * `date`
+* `date_nanos` (Tech Preview)
+** The following functions don't yet support date nanos: `bucket`, `date_format`, `date_parse`, `date_diff`, `date_extract`
+** You can use `to_datetime` to cast to millisecond dates in order to use these functions
* `double` (`float`, `half_float`, `scaled_float` are represented as `double`) * `ip` * `keyword` family including `keyword`, `constant_keyword`, and `wildcard`
@@ -50,7 +53,6 @@ include::processing-commands/limit.asciidoc[tag=limitation] ** `position` ** `aggregate_metric_double` * Date/time
-** `date_nanos`
** `date_range` * Other types ** `binary`
diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index 63b8738266132..8e07a627567df 100644
--- a/docs/reference/esql/esql-query-api.asciidoc
+++ b/docs/reference/esql/esql-query-api.asciidoc
@@ -92,8 +92,8 @@ https://en.wikipedia.org/wiki/Query_plan[EXPLAIN PLAN].
ifeval::["{release-state}"=="unreleased"]
-`table`::
-(Optional, object) Named "table" parameters that can be referenced by the <> command.
+//`table`::
+//(Optional, object) Named "table" parameters that can be referenced by the <> command.
endif::[]
[discrete]
diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index c7f741d064310..ba1c4ca820381 100644
--- a/docs/reference/esql/esql-syntax.asciidoc
+++ b/docs/reference/esql/esql-syntax.asciidoc
@@ -157,21 +157,15 @@ FROM employees ==== Timespan literals Datetime intervals and timespans can be expressed using timespan literals.
-Timespan literals are a combination of a number and a qualifier. These
-qualifiers are supported:
-
-* `millisecond`/`milliseconds`/`ms`
-* `second`/`seconds`/`sec`/`s`
-* `minute`/`minutes`/`min`
-* `hour`/`hours`/`h`
-* `day`/`days`/`d`
-* `week`/`weeks`/`w`
-* `month`/`months`/`mo`
-* `quarter`/`quarters`/`q`
-* `year`/`years`/`yr`/`y`
+Timespan literals are a combination of a number and a temporal unit. The
+supported temporal units are listed in <>.
+More examples of using time spans can be found in
+<>.
+
Timespan literals are not whitespace sensitive. These expressions are all valid: * `1day` * `1 day` * `1     day`
+
diff --git a/docs/reference/esql/functions/binary.asciidoc b/docs/reference/esql/functions/binary.asciidoc index 72d466ae83d11..59bdadecc4923 100644
--- a/docs/reference/esql/functions/binary.asciidoc
+++ b/docs/reference/esql/functions/binary.asciidoc
@@ -87,6 +87,7 @@ Supported types: include::types/greater_than_or_equal.asciidoc[]
+[[esql-add]]
==== Add `+` [.text-center] image::esql/functions/signature/add.svg[Embedded,opts=inline]
@@ -98,6 +99,7 @@ Supported types: include::types/add.asciidoc[]
+[[esql-subtract]]
==== Subtract `-` [.text-center] image::esql/functions/signature/sub.svg[Embedded,opts=inline]
diff --git a/docs/reference/esql/functions/examples/count.asciidoc b/docs/reference/esql/functions/examples/count.asciidoc index fb696b51e054c..33ed054d3d1e2 100644
--- a/docs/reference/esql/functions/examples/count.asciidoc
+++ b/docs/reference/esql/functions/examples/count.asciidoc
@@ -37,7 +37,7 @@ include::{esql-specs}/stats.csv-spec[tag=count-where] |=== include::{esql-specs}/stats.csv-spec[tag=count-where-result] |===
-To count the same stream of data based on two different expressions use the pattern `COUNT(<expression> OR NULL)`
+To count the same stream of data based on two different expressions, use the pattern `COUNT(<expression> OR NULL)`. This builds on the three-valued logic ({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0.
[source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=count-or-null]
diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index bf498f690551c..51693d9d30660 100644
--- a/docs/reference/esql/functions/kibana/definition/case.json
+++ b/docs/reference/esql/functions/kibana/definition/case.json
@@ -172,6 +172,48 @@ "variadic" : true, "returnType" : "date" },
+    {
+      "params" : [
+        {
+          "name" : "condition",
+          "type" : "boolean",
+          "optional" : false,
+          "description" : "A condition."
+        },
+        {
+          "name" : "trueValue",
+          "type" : "date_nanos",
+          "optional" : false,
+          "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches."
+        }
+      ],
+      "variadic" : true,
+      "returnType" : "date_nanos"
+    },
+    {
+      "params" : [
+        {
+          "name" : "condition",
+          "type" : "boolean",
+          "optional" : false,
+          "description" : "A condition."
+ }, + { + "name" : "trueValue", + "type" : "date_nanos", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + }, + { + "name" : "elseValue", + "type" : "date_nanos", + "optional" : true, + "description" : "The value that's returned when no condition evaluates to `true`." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 7f49195190951..c929323397c9b 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -88,6 +88,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "Expression to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "Other expression to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/count.json b/docs/reference/esql/functions/kibana/definition/count.json index 88d4ba3d3e339..329a18c4d9d01 100644 --- a/docs/reference/esql/functions/kibana/definition/count.json +++ b/docs/reference/esql/functions/kibana/definition/count.json @@ -151,7 +151,7 @@ ], "examples" : [ "FROM employees\n| STATS COUNT(height)", - "FROM employees \n| STATS count = COUNT(*) BY languages \n| SORT languages DESC", + "FROM employees\n| STATS count = COUNT(*) BY languages\n| SORT languages DESC", "ROW words=\"foo;bar;baz;qux;quux;foo\"\n| STATS word_count = COUNT(SPLIT(words, \";\"))", "ROW n=1\n| WHERE n < 0\n| STATS COUNT(n)", "ROW n=1\n| STATS COUNT(n > 0 OR NULL), COUNT(n < 0 OR NULL)" diff --git a/docs/reference/esql/functions/kibana/definition/count_distinct.json b/docs/reference/esql/functions/kibana/definition/count_distinct.json index 3addd08df60df..54b99ee84ce2d 100644 --- a/docs/reference/esql/functions/kibana/definition/count_distinct.json +++ b/docs/reference/esql/functions/kibana/definition/count_distinct.json @@ -136,6 +136,72 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "integer", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. 
The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Column or literal for which to count the number of distinct values." + }, + { + "name" : "precision", + "type" : "unsigned_long", + "optional" : true, + "description" : "Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000." + } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/date_trunc.json b/docs/reference/esql/functions/kibana/definition/date_trunc.json index 871994407233b..cdda984a0ce7e 100644 --- a/docs/reference/esql/functions/kibana/definition/date_trunc.json +++ b/docs/reference/esql/functions/kibana/definition/date_trunc.json @@ -22,6 +22,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "interval", + "type" : "date_period", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -39,6 +57,24 @@ ], "variadic" : false, "returnType" : "date" + }, + { + "params" : [ + { + "name" : "interval", + "type" : "time_duration", + "optional" : false, + "description" : "Interval; expressed using the timespan literal syntax." + }, + { + "name" : "date", + "type" : "date_nanos", + "optional" : false, + "description" : "Date expression" + } + ], + "variadic" : false, + "returnType" : "date_nanos" } ], "examples" : [ diff --git a/docs/reference/esql/functions/kibana/definition/equals.json b/docs/reference/esql/functions/kibana/definition/equals.json index 59df59eaccc4e..885d949f4b20f 100644 --- a/docs/reference/esql/functions/kibana/definition/equals.json +++ b/docs/reference/esql/functions/kibana/definition/equals.json @@ -77,6 +77,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than.json b/docs/reference/esql/functions/kibana/definition/greater_than.json index 7354112551e2c..cf6e30a0a4547 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json index 832eed417ef4a..2535c68af6acf 100644 --- a/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/greater_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index eebb4fad1eb1d..077100317dfca 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -53,6 +53,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 02fa58f92eaef..18ec65c60f475 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -52,6 +52,24 @@ "variadic" : true, "returnType" : "date" }, + { + "params" : [ + { + "name" : "first", + "type" : "date_nanos", + "optional" : false, + "description" : "First of the columns to evaluate." + }, + { + "name" : "rest", + "type" : "date_nanos", + "optional" : true, + "description" : "The rest of the columns to evaluate." + } + ], + "variadic" : true, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than.json b/docs/reference/esql/functions/kibana/definition/less_than.json index 66578d73b8e9c..a73754d200d46 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than.json +++ b/docs/reference/esql/functions/kibana/definition/less_than.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json index 5ffd4567cdb07..7af477db32a34 100644 --- a/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json +++ b/docs/reference/esql/functions/kibana/definition/less_than_or_equal.json @@ -23,6 +23,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." 
+ }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/match.json b/docs/reference/esql/functions/kibana/definition/match.json index 8a355360a790f..4a5b05a3f501b 100644 --- a/docs/reference/esql/functions/kibana/definition/match.json +++ b/docs/reference/esql/functions/kibana/definition/match.json @@ -78,7 +78,7 @@ } ], "examples" : [ - "from books \n| where match(author, \"Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json new file mode 100644 index 0000000000000..2facebfc44e57 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -0,0 +1,49 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "name" : "match_operator", + "description" : "Performs a match query on the specified field. Returns true if the provided query matches the row.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." 
+ } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + ], + "preview" : true, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/definition/max.json b/docs/reference/esql/functions/kibana/definition/max.json index 45fd26571b091..7f3d2215ee099 100644 --- a/docs/reference/esql/functions/kibana/definition/max.json +++ b/docs/reference/esql/functions/kibana/definition/max.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/min.json b/docs/reference/esql/functions/kibana/definition/min.json index ae71fba049dbe..74e3fd8208f1b 100644 --- a/docs/reference/esql/functions/kibana/definition/min.json +++ b/docs/reference/esql/functions/kibana/definition/min.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_count.json b/docs/reference/esql/functions/kibana/definition/mv_count.json index 4767b35ec7cac..90ace2525f710 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_count.json +++ b/docs/reference/esql/functions/kibana/definition/mv_count.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "integer" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "integer" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json index bfca58bc3e140..ce2c96dbc1757 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -53,6 +53,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json index a2b6358023e4b..552f568c9b171 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_first.json +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json index b6dc268af5305..78d7b348a6042 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_last.json +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_max.json b/docs/reference/esql/functions/kibana/definition/mv_max.json index 27d2b010dc02c..a1e55c58cff70 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_max.json +++ b/docs/reference/esql/functions/kibana/definition/mv_max.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_min.json b/docs/reference/esql/functions/kibana/definition/mv_min.json index 410e97335687f..7998ca4eda94e 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_min.json +++ b/docs/reference/esql/functions/kibana/definition/mv_min.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json index dbbfe0ffb5a78..df4d48145fac6 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_slice.json +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -100,6 +100,30 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression. If `null`, the function returns `null`." + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "Start position. If `null`, the function returns `null`. The start argument can be negative. An index of -1 is used to specify the last value in the list." + }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "End position(included). Optional; if omitted, the position at `start` is returned. The end argument can be negative. An index of -1 is used to specify the last value in the list." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_sort.json b/docs/reference/esql/functions/kibana/definition/mv_sort.json index 4cb255fb0afcb..072c05743af33 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_sort.json +++ b/docs/reference/esql/functions/kibana/definition/mv_sort.json @@ -40,6 +40,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Multivalue expression. 
If `null`, the function returns `null`." + }, + { + "name" : "order", + "type" : "keyword", + "optional" : true, + "description" : "Sort order. The valid options are ASC and DESC, the default is ASC." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/not_equals.json b/docs/reference/esql/functions/kibana/definition/not_equals.json index 69389d4c8d077..24f31115cbc37 100644 --- a/docs/reference/esql/functions/kibana/definition/not_equals.json +++ b/docs/reference/esql/functions/kibana/definition/not_equals.json @@ -77,6 +77,24 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "lhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + }, + { + "name" : "rhs", + "type" : "date_nanos", + "optional" : false, + "description" : "An expression." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/qstr.json b/docs/reference/esql/functions/kibana/definition/qstr.json index 9823c3cff8923..76473349a3414 100644 --- a/docs/reference/esql/functions/kibana/definition/qstr.json +++ b/docs/reference/esql/functions/kibana/definition/qstr.json @@ -30,7 +30,7 @@ } ], "examples" : [ - "from books \n| where qstr(\"author: Faulkner\")\n| keep book_no, author \n| sort book_no \n| limit 5;" + "FROM books \n| WHERE QSTR(\"author: Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" ], "preview" : true, "snapshot_only" : false diff --git a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json index 07ffe84444f02..d9409bceb8e6f 100644 --- a/docs/reference/esql/functions/kibana/definition/to_date_nanos.json +++ b/docs/reference/esql/functions/kibana/definition/to_date_nanos.json @@ -4,7 +4,92 @@ "name" : "to_date_nanos", "description" : "Converts an input to a nanosecond-resolution date value (aka date_nanos).", "note" : "The range for date nanos is 1970-01-01T00:00:00.000000000Z to 2262-04-11T23:47:16.854775807Z. Additionally, integers cannot be converted into date nanos, as the range of integer nanoseconds only covers about 2 seconds after epoch.", - "signatures" : [ ], + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "date", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "double", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "long", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." 
+ } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date_nanos" + } + ], "preview" : true, "snapshot_only" : false } diff --git a/docs/reference/esql/functions/kibana/definition/to_datetime.json b/docs/reference/esql/functions/kibana/definition/to_datetime.json index 072aa66aac669..8f9ecbd139d32 100644 --- a/docs/reference/esql/functions/kibana/definition/to_datetime.json +++ b/docs/reference/esql/functions/kibana/definition/to_datetime.json @@ -17,6 +17,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "date" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/to_long.json b/docs/reference/esql/functions/kibana/definition/to_long.json index afd6de001bbc6..eb1ce7220c3f9 100644 --- a/docs/reference/esql/functions/kibana/definition/to_long.json +++ b/docs/reference/esql/functions/kibana/definition/to_long.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/to_string.json b/docs/reference/esql/functions/kibana/definition/to_string.json index 33e95d5bed1c2..1c86e81b31136 100644 --- a/docs/reference/esql/functions/kibana/definition/to_string.json +++ b/docs/reference/esql/functions/kibana/definition/to_string.json @@ -52,6 +52,18 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "Input value. The input can be a single- or multi-valued column or an expression." + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json index ae69febd4f755..95ac402bb242a 100644 --- a/docs/reference/esql/functions/kibana/definition/values.json +++ b/docs/reference/esql/functions/kibana/definition/values.json @@ -28,6 +28,18 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/docs/match.md b/docs/reference/esql/functions/kibana/docs/match.md index 3c06662982bbf..b866637b41b85 100644 --- a/docs/reference/esql/functions/kibana/docs/match.md +++ b/docs/reference/esql/functions/kibana/docs/match.md @@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../READ Performs a match query on the specified field. Returns true if the provided query matches the row. ``` -from books -| where match(author, "Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; ``` diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md new file mode 100644 index 0000000000000..fda8b24ff76cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -0,0 +1,14 @@ + + +### MATCH_OPERATOR +Performs a match query on the specified field. Returns true if the provided query matches the row. + +``` +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; +``` diff --git a/docs/reference/esql/functions/kibana/docs/qstr.md b/docs/reference/esql/functions/kibana/docs/qstr.md index 37b5777623185..9b5dc3f9a22eb 100644 --- a/docs/reference/esql/functions/kibana/docs/qstr.md +++ b/docs/reference/esql/functions/kibana/docs/qstr.md @@ -6,9 +6,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ Performs a query string query. Returns true if the provided query string matches the row. ``` -from books -| where qstr("author: Faulkner") -| keep book_no, author -| sort book_no -| limit 5; +FROM books +| WHERE QSTR("author: Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; ``` diff --git a/docs/reference/esql/functions/operators.asciidoc b/docs/reference/esql/functions/operators.asciidoc index ee344a52687c2..a1a2226335e9b 100644 --- a/docs/reference/esql/functions/operators.asciidoc +++ b/docs/reference/esql/functions/operators.asciidoc @@ -16,6 +16,7 @@ Boolean operators for comparing against one or multiple expressions. * <> * <> * <> +* experimental:[] <> // end::op_list[] include::binary.asciidoc[] @@ -26,3 +27,4 @@ include::cast.asciidoc[] include::in.asciidoc[] include::like.asciidoc[] include::rlike.asciidoc[] +include::search.asciidoc[] diff --git a/docs/reference/esql/functions/search.asciidoc b/docs/reference/esql/functions/search.asciidoc new file mode 100644 index 0000000000000..ae1b003b65abb --- /dev/null +++ b/docs/reference/esql/functions/search.asciidoc @@ -0,0 +1,23 @@ +[discrete] +[[esql-search-operators]] +=== Search operators + +The only search operator is match (`:`). + +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +The match operator performs a <> on the specified field. Returns true if the provided query matches the row. 
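For example, the `MATCH` function query shown in the function documentation can be rewritten with the operator syntax. This is a minimal sketch, assuming the same `books` test index used in the function examples; the field and query values are illustrative only:

[source,esql]
----
FROM books
| WHERE author : "Faulkner"
| KEEP book_no, author
| SORT book_no
| LIMIT 5
----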
+ +[.text-center] +image::esql/functions/signature/match_operator.svg[Embedded,opts=inline] + +include::types/match.asciidoc[] + +[source.merge.styled,esql] +---- +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field-result] +|=== diff --git a/docs/reference/esql/functions/signature/match_operator.svg b/docs/reference/esql/functions/signature/match_operator.svg new file mode 100644 index 0000000000000..70cea841622eb --- /dev/null +++ b/docs/reference/esql/functions/signature/match_operator.svg @@ -0,0 +1 @@ +field:query \ No newline at end of file diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index c6fb6a091e9d0..9e6915a37fc14 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -13,6 +13,8 @@ boolean | cartesian_shape | cartesian_shape | cartesian_shape boolean | cartesian_shape | | cartesian_shape boolean | date | date | date boolean | date | | date +boolean | date_nanos | date_nanos | date_nanos +boolean | date_nanos | | date_nanos boolean | double | double | double boolean | double | | double boolean | geo_point | geo_point | geo_point diff --git a/docs/reference/esql/functions/types/coalesce.asciidoc b/docs/reference/esql/functions/types/coalesce.asciidoc index 23a249494e0a2..b6479dc7ff86a 100644 --- a/docs/reference/esql/functions/types/coalesce.asciidoc +++ b/docs/reference/esql/functions/types/coalesce.asciidoc @@ -10,6 +10,7 @@ boolean | | boolean cartesian_point | cartesian_point | cartesian_point cartesian_shape | cartesian_shape | cartesian_shape date | date | date +date_nanos | date_nanos | date_nanos geo_point | geo_point | geo_point geo_shape | geo_shape | geo_shape integer | integer | integer diff --git a/docs/reference/esql/functions/types/count_distinct.asciidoc b/docs/reference/esql/functions/types/count_distinct.asciidoc index c365c8814573c..f5758a8914d20 100644 --- a/docs/reference/esql/functions/types/count_distinct.asciidoc +++ b/docs/reference/esql/functions/types/count_distinct.asciidoc @@ -13,6 +13,10 @@ date | integer | long date | long | long date | unsigned_long | long date | | long +date_nanos | integer | long +date_nanos | long | long +date_nanos | unsigned_long | long +date_nanos | | long double | integer | long double | long | long double | unsigned_long | long diff --git a/docs/reference/esql/functions/types/date_trunc.asciidoc b/docs/reference/esql/functions/types/date_trunc.asciidoc index aa7dee99c6c44..c610f9119e51c 100644 --- a/docs/reference/esql/functions/types/date_trunc.asciidoc +++ b/docs/reference/esql/functions/types/date_trunc.asciidoc @@ -6,5 +6,7 @@ |=== interval | date | result date_period | date | date +date_period | date_nanos | date_nanos time_duration | date | date +time_duration | date_nanos | date_nanos |=== diff --git a/docs/reference/esql/functions/types/equals.asciidoc b/docs/reference/esql/functions/types/equals.asciidoc index ad0e46ef4b8da..8d48b7ebf084a 100644 --- a/docs/reference/esql/functions/types/equals.asciidoc +++ b/docs/reference/esql/functions/types/equals.asciidoc @@ -9,6 +9,7 @@ boolean | boolean | boolean cartesian_point | cartesian_point | boolean cartesian_shape | cartesian_shape | boolean date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git 
a/docs/reference/esql/functions/types/greater_than.asciidoc b/docs/reference/esql/functions/types/greater_than.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/greater_than.asciidoc +++ b/docs/reference/esql/functions/types/greater_than.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/greater_than_or_equal.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/greatest.asciidoc b/docs/reference/esql/functions/types/greatest.asciidoc index 7df77a6991315..0bc11b569d426 100644 --- a/docs/reference/esql/functions/types/greatest.asciidoc +++ b/docs/reference/esql/functions/types/greatest.asciidoc @@ -8,6 +8,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean date | date | date +date_nanos | date_nanos | date_nanos double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/least.asciidoc b/docs/reference/esql/functions/types/least.asciidoc index 7df77a6991315..0bc11b569d426 100644 --- a/docs/reference/esql/functions/types/least.asciidoc +++ b/docs/reference/esql/functions/types/least.asciidoc @@ -8,6 +8,7 @@ first | rest | result boolean | boolean | boolean boolean | | boolean date | date | date +date_nanos | date_nanos | date_nanos double | double | double integer | integer | integer integer | | integer diff --git a/docs/reference/esql/functions/types/less_than.asciidoc b/docs/reference/esql/functions/types/less_than.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/less_than.asciidoc +++ b/docs/reference/esql/functions/types/less_than.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc index c506328126a94..8000fd34c8507 100644 --- a/docs/reference/esql/functions/types/less_than_or_equal.asciidoc +++ b/docs/reference/esql/functions/types/less_than_or_equal.asciidoc @@ -6,6 +6,7 @@ |=== lhs | rhs | result date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc new file mode 100644 index 0000000000000..5c6afacdce1b2 --- /dev/null +++ b/docs/reference/esql/functions/types/match_operator.asciidoc @@ -0,0 +1,10 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | query | result +keyword | keyword | boolean +text | text | boolean +|=== diff --git a/docs/reference/esql/functions/types/max.asciidoc b/docs/reference/esql/functions/types/max.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/max.asciidoc +++ b/docs/reference/esql/functions/types/max.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/min.asciidoc b/docs/reference/esql/functions/types/min.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/min.asciidoc +++ b/docs/reference/esql/functions/types/min.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 260c531731f04..c58c4eda44396 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -9,6 +9,7 @@ boolean | integer cartesian_point | integer cartesian_shape | integer date | integer +date_nanos | integer double | integer geo_point | integer geo_shape | integer diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 976de79bb0910..1524ec86cd5ec 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index 47736e76d1db4..e68af2f992b43 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index 47736e76d1db4..e68af2f992b43 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -9,6 +9,7 @@ boolean | boolean cartesian_point | cartesian_point cartesian_shape | cartesian_shape date | date +date_nanos | date_nanos double | double geo_point | geo_point geo_shape | geo_shape diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index d4e014554c86c..ffba14489b97c 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index d4e014554c86c..ffba14489b97c 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ 
-7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 60c1f6315a599..75f45e333ee0c 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -9,6 +9,7 @@ boolean | integer | integer | boolean cartesian_point | integer | integer | cartesian_point cartesian_shape | integer | integer | cartesian_shape date | integer | integer | date +date_nanos | integer | integer | date_nanos double | integer | integer | double geo_point | integer | integer | geo_point geo_shape | integer | integer | geo_shape diff --git a/docs/reference/esql/functions/types/mv_sort.asciidoc b/docs/reference/esql/functions/types/mv_sort.asciidoc index c21ea5983945e..83d3e45c7be02 100644 --- a/docs/reference/esql/functions/types/mv_sort.asciidoc +++ b/docs/reference/esql/functions/types/mv_sort.asciidoc @@ -7,6 +7,7 @@ field | order | result boolean | keyword | boolean date | keyword | date +date_nanos | keyword | date_nanos double | keyword | double integer | keyword | integer ip | keyword | ip diff --git a/docs/reference/esql/functions/types/not_equals.asciidoc b/docs/reference/esql/functions/types/not_equals.asciidoc index ad0e46ef4b8da..8d48b7ebf084a 100644 --- a/docs/reference/esql/functions/types/not_equals.asciidoc +++ b/docs/reference/esql/functions/types/not_equals.asciidoc @@ -9,6 +9,7 @@ boolean | boolean | boolean cartesian_point | cartesian_point | boolean cartesian_shape | cartesian_shape | boolean date | date | boolean +date_nanos | date_nanos | boolean double | double | boolean double | integer | boolean double | long | boolean diff --git a/docs/reference/esql/functions/types/to_date_nanos.asciidoc b/docs/reference/esql/functions/types/to_date_nanos.asciidoc index 1f50b65f25a77..dec6833d14b08 100644 --- a/docs/reference/esql/functions/types/to_date_nanos.asciidoc +++ b/docs/reference/esql/functions/types/to_date_nanos.asciidoc @@ -5,5 +5,11 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== field | result -date_nanos +date | date_nanos +date_nanos | date_nanos +double | date_nanos +keyword | date_nanos +long | date_nanos +text | date_nanos +unsigned_long | date_nanos |=== diff --git a/docs/reference/esql/functions/types/to_datetime.asciidoc b/docs/reference/esql/functions/types/to_datetime.asciidoc index 80c986efca794..118ed6c09c11c 100644 --- a/docs/reference/esql/functions/types/to_datetime.asciidoc +++ b/docs/reference/esql/functions/types/to_datetime.asciidoc @@ -6,6 +6,7 @@ |=== field | result date | date +date_nanos | date double | date integer | date keyword | date diff --git a/docs/reference/esql/functions/types/to_long.asciidoc b/docs/reference/esql/functions/types/to_long.asciidoc index a07990cb1cfbf..1009543c1bbde 100644 --- a/docs/reference/esql/functions/types/to_long.asciidoc +++ b/docs/reference/esql/functions/types/to_long.asciidoc @@ -9,6 +9,7 @@ boolean | long counter_integer | long counter_long | long date | long +date_nanos | long double | long integer | long keyword | long diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 26a5b31a2a589..9d4188214b3d8 100644 --- a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -9,6 +9,7 @@ boolean | keyword cartesian_point | keyword 
cartesian_shape | keyword date | keyword +date_nanos | keyword double | keyword geo_point | keyword geo_shape | keyword diff --git a/docs/reference/esql/functions/types/values.asciidoc b/docs/reference/esql/functions/types/values.asciidoc index 564fb8dc3bfb0..adf457dac31b8 100644 --- a/docs/reference/esql/functions/types/values.asciidoc +++ b/docs/reference/esql/functions/types/values.asciidoc @@ -7,6 +7,7 @@ field | result boolean | boolean date | date +date_nanos | date_nanos double | double integer | integer ip | ip diff --git a/docs/reference/esql/implicit-casting.asciidoc b/docs/reference/esql/implicit-casting.asciidoc index ffb6d3fc35acb..b24be0b645472 100644 --- a/docs/reference/esql/implicit-casting.asciidoc +++ b/docs/reference/esql/implicit-casting.asciidoc @@ -5,7 +5,7 @@ Implicit casting ++++ -Often users will input `date`, `ip`, `version`, `date_period` or `time_duration` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. +Often users will input `date`, `date_period`, `time_duration`, `ip` or `version` as simple strings in their queries for use in predicates, functions, or expressions. {esql} provides <> to explicitly convert these strings into the desired data types. Without implicit casting users must explicitly code these `to_X` functions in their queries, when string literals don't match the target data types they are assigned or compared to. Here is an example of using `to_datetime` to explicitly perform a data type conversion. @@ -18,7 +18,10 @@ FROM employees | LIMIT 1 ---- -Implicit casting improves usability, by automatically converting string literals to the target data type. This is most useful when the target data type is `date`, `ip`, `version`, `date_period` or `time_duration`. It is natural to specify these as a string in queries. +[discrete] +[[esql-implicit-casting-example]] +==== Implicit casting example +Implicit casting automatically converts string literals to the target data type. This allows users to specify string values for types like `date`, `date_period`, `time_duration`, `ip` and `version` in their queries. The first query can be coded without calling the `to_datetime` function, as follows: @@ -31,35 +34,36 @@ FROM employees | LIMIT 1 ---- -[float] -=== Implicit casting support +[discrete] +[[esql-implicit-casting-supported-operations]] +==== Operations that support implicit casting The following table details which {esql} operations support implicit casting for different data types. 
[%header.monospaced.styled,format=dsv,separator=|] |=== -||ScalarFunction*|Operator*|<>|<> -|DATE|Y|Y|Y|N -|IP|Y|Y|Y|N -|VERSION|Y|Y|Y|N -|BOOLEAN|Y|Y|Y|N -|DATE_PERIOD/TIME_DURATION|Y|N|Y|N +|ScalarFunctions|Operators|<>|<> +DATE|Y|Y|Y|N +DATE_PERIOD/TIME_DURATION|Y|N|Y|N +IP|Y|Y|Y|N +VERSION|Y|Y|Y|N +BOOLEAN|Y|Y|Y|N |=== -ScalarFunction* includes: +ScalarFunctions includes: -<> +* <> -<> +* <> -<> +* <> -Operator* includes: +Operators includes: -<> +* <> -<> +* <> -<> +* <> diff --git a/docs/reference/esql/processing-commands/inlinestats.asciidoc b/docs/reference/esql/processing-commands/inlinestats.disabled similarity index 100% rename from docs/reference/esql/processing-commands/inlinestats.asciidoc rename to docs/reference/esql/processing-commands/inlinestats.disabled diff --git a/docs/reference/esql/processing-commands/lookup.asciidoc b/docs/reference/esql/processing-commands/lookup.disabled similarity index 100% rename from docs/reference/esql/processing-commands/lookup.asciidoc rename to docs/reference/esql/processing-commands/lookup.disabled diff --git a/docs/reference/esql/time-spans.asciidoc b/docs/reference/esql/time-spans.asciidoc new file mode 100644 index 0000000000000..d2aa0c4fa252e --- /dev/null +++ b/docs/reference/esql/time-spans.asciidoc @@ -0,0 +1,111 @@ +[[esql-time-spans]] +=== {esql} time spans + +++++ +Time spans +++++ + +Time spans represent intervals between two datetime values. There are currently two supported types of time spans: + +* `DATE_PERIOD` specifies intervals in years, quarters, months, weeks and days +* `TIME_DURATION` specifies intervals in hours, minutes, seconds and milliseconds + +A time span requires two elements: an integer value and a temporal unit. + +Time spans work with grouping functions such as <>, scalar functions such as <> and arithmetic operators such as <> and <>. Convert strings to time spans using <>, <>, or the cast operators `::DATE_PERIOD`, `::TIME_DURATION`. + +[discrete] +[[esql-time-spans-examples]] +==== Examples of using time spans in {esql} + + +With `BUCKET`: +[source.merge.styled,esql] +---- +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/bucket.csv-spec[tag=docsBucketWeeklyHistogramWithSpan-result] +|=== + + +With `DATE_TRUNC`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] +|=== + + +With `+` and/or `-`: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + + +When a time span is provided as a named parameter in string format, `TO_DATEPERIOD`, `::DATE_PERIOD`, `TO_TIMEDURATION` or `::TIME_DURATION` can be used to convert to its corresponding time span value for arithmetic operations like `+` and/or `-`. +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | EVAL x = hire_date + ?timespan::DATE_PERIOD, y = hire_date - TO_DATEPERIOD(?timespan) + """, + "params": [{"timespan" : "1 day"}] +} +---- + +When a time span is provided as a named parameter in string format, it can be automatically converted to its corresponding time span value in grouping functions and scalar functions, like `BUCKET` and `DATE_TRUNC`. 
+[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" + | STATS hires_per_week = COUNT(*) BY week = BUCKET(hire_date, ?timespan) + | SORT week + """, + "params": [{"timespan" : "1 week"}] +} +---- + +[source, esql] +---- +POST /_query +{ + "query": """ + FROM employees + | KEEP first_name, last_name, hire_date + | EVAL year_hired = DATE_TRUNC(?timespan, hire_date) + """, + "params": [{"timespan" : "1 year"}] +} +---- + +[discrete] +[[esql-time-spans-table]] +==== Supported temporal units +[%header.monospaced.styled,format=dsv,separator=|] +|=== +Temporal Units|Valid Abbreviations +year|y, yr, years +quarter|q, quarters +month|mo, months +week|w, weeks +day|d, days +hour|h, hours +minute|min, minutes +second|s, sec, seconds +millisecond|ms, milliseconds +|=== diff --git a/docs/reference/how-to.asciidoc b/docs/reference/how-to.asciidoc index ec514eb05be29..eeac2fe5c2f50 100644 --- a/docs/reference/how-to.asciidoc +++ b/docs/reference/how-to.asciidoc @@ -1,23 +1,21 @@ [[how-to]] -= How to += Optimizations -[partintro] --- -Elasticsearch ships with defaults which are intended to give a good out of -the box experience. Full text search, highlighting, aggregations, and indexing -should all just work without the user having to change anything. +Elasticsearch's default settings provide a good out-of-box experience for basic operations like full text search, highlighting, aggregations, and indexing. -Once you better understand how you want to use Elasticsearch, however, -there are a number of optimizations you can make to improve performance -for your use case. +However, there are a number of optimizations you can make to improve performance for your use case. -This section provides guidance about which changes should and shouldn't be -made. --- +This section provides recommendations for various use cases. -include::how-to/general.asciidoc[] +* <> +* <> +* <> +* <> +* <> +* <> +* <> -include::how-to/recipes.asciidoc[] +include::how-to/general.asciidoc[] include::how-to/indexing-speed.asciidoc[] diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc index b46f624aef51d..de23404be6164 100644 --- a/docs/reference/how-to/recipes.asciidoc +++ b/docs/reference/how-to/recipes.asciidoc @@ -1,7 +1,7 @@ [[recipes]] -== Recipes +== Search relevance optimizations -This section includes a few recipes to help with common problems: +This section includes a few recipes to help with common search relevance issues: * <> * <> diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 5c5a8977d34d4..a578826e31fac 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -88,8 +88,9 @@ pages independently of the query. There are two main queries that allow combining static score contributions with textual relevance, eg. as computed with BM25: - - <> - - <> + +* <> +* <> For instance imagine that you have a `pagerank` field that you wish to combine with the BM25 score so that the final score is equal to diff --git a/docs/reference/ilm/error-handling.asciidoc b/docs/reference/ilm/error-handling.asciidoc index e8df44653e9c5..911dc8b9cce40 100644 --- a/docs/reference/ilm/error-handling.asciidoc +++ b/docs/reference/ilm/error-handling.asciidoc @@ -8,8 +8,9 @@ When this happens, {ilm-init} moves the index to an `ERROR` step. 
If {ilm-init} cannot resolve the error automatically, execution is halted until you resolve the underlying issues with the policy, index, or cluster. -See this https://www.youtube.com/watch?v=VCIqkji3IwY[{ilm-init} health video] -for example troubleshooting walkthrough. +See https://www.youtube.com/watch?v=VCIqkji3IwY[this video] +for a walkthrough of troubleshooting current {ilm-init} health issues, and https://www.youtube.com/watch?v=onrnnwjYWSQ[this video] +for a walkthrough of troubleshooting historical {ilm-init} issues. For example, you might have a `shrink-index` policy that shrinks an index to four shards once it is at least five days old: diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc index 984fb0d5bf1c1..17d67e00e2ebb 100644 --- a/docs/reference/modules/http.asciidoc +++ b/docs/reference/modules/http.asciidoc @@ -145,11 +145,9 @@ NOTE: This header is only returned when the setting is set to `true`. `http.detailed_errors.enabled`:: (<>, boolean) -Configures whether detailed error reporting in HTTP responses is enabled. -Defaults to `true`, which means that HTTP requests that include the -<> will return a -detailed error message including a stack trace if they encounter an exception. -If set to `false`, requests with the `?error_trace` parameter are rejected. +Configures whether detailed error reporting in HTTP responses is enabled. Defaults to `true`. +When this option is set to `false`, only basic information is returned if an error occurs in the request, +and requests with <> set are rejected. `http.pipelining.max_events`:: (<>, integer) diff --git a/docs/reference/search/search-your-data/search-your-data.asciidoc b/docs/reference/search/search-your-data/search-your-data.asciidoc index 82541412db4bd..9ef1ae0ebc59b 100644 --- a/docs/reference/search/search-your-data/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data/search-your-data.asciidoc @@ -43,10 +43,12 @@ DSL, with a simplified user experience. Create search applications based on your results directly in the Kibana Search UI. include::search-api.asciidoc[] +include::../../how-to/recipes.asciidoc[] +// ☝️ search relevance recipes include::retrievers-overview.asciidoc[] include::knn-search.asciidoc[] include::semantic-search.asciidoc[] include::search-across-clusters.asciidoc[] include::search-with-synonyms.asciidoc[] include::search-application-overview.asciidoc[] -include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] +include::behavioral-analytics/behavioral-analytics-overview.asciidoc[] \ No newline at end of file diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 36f311b1cdd97..1b08a802a444f 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -6,6 +6,9 @@ You can use AWS S3 as a repository for {ref}/snapshot-restore.html[Snapshot/Rest *If you are looking for a hosted solution of Elasticsearch on AWS, please visit https://www.elastic.co/cloud/.* +See https://www.youtube.com/watch?v=ACqfyzWf-xs[this video] +for a walkthrough of connecting an AWS S3 repository. 
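+For a quick taste of what the walkthrough covers, registering an S3 repository is a single request (a minimal sketch; `my_s3_repository` and `my-bucket` are placeholder names, and the S3 client credentials must already be configured as described in the sections below):
+
+[source,console]
+----
+PUT _snapshot/my_s3_repository
+{
+  "type": "s3",
+  "settings": {
+    "bucket": "my-bucket"
+  }
+}
+----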
+ [[repository-s3-usage]] ==== Getting started diff --git a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc index fe9422d6d4c53..e1ceefb92bbec 100644 --- a/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc +++ b/docs/reference/troubleshooting/common-issues/diagnose-unassigned-shards.asciidoc @@ -8,5 +8,6 @@ In order to diagnose the unassigned shards in your deployment use the following include::{es-ref-dir}/tab-widgets/troubleshooting/data/diagnose-unassigned-shards-widget.asciidoc[] - +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index eb56a37562c31..4289242deb486 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -19,6 +19,8 @@ operate to have a green health status. In many cases, your cluster will recover to green health status automatically. If the cluster doesn't automatically recover, then you must <> the remaining problems so management and cleanup activities can proceed. +See https://www.youtube.com/watch?v=v2mbeSd1vTQ[this video] +for a walkthrough of monitoring allocation health. [discrete] [[diagnose-cluster-status]] @@ -90,6 +92,8 @@ PUT _cluster/settings } ---- +See https://www.youtube.com/watch?v=MiKKUdZvwnI[this video] for a walkthrough of troubleshooting "no allocations are allowed". [discrete] [[fix-cluster-status-recover-nodes]] ===== Recover lost nodes @@ -262,3 +266,5 @@ POST _cluster/reroute ---- // TEST[s/^/PUT my-index\n/] // TEST[catch:bad_request] + +See https://www.youtube.com/watch?v=6OAg9IyXFO4[this video] for a walkthrough of troubleshooting `no_valid_shard_copy`. \ No newline at end of file diff --git a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc index a944ca88d285d..c6d46b9e94fc8 100644 --- a/docs/reference/troubleshooting/diagnostic.asciidoc +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -13,6 +13,8 @@ This information can be used to troubleshoot problems with your cluster. For exa You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. +See https://www.youtube.com/watch?v=Bb6SaqhqYHw[this video] for a walkthrough of capturing an {es} diagnostic. 
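+As a sketch, a typical local capture looks like the following (the script name and flags vary by diagnostic tool version and platform; `--type local` assumes the tool runs on the node being diagnosed, listening on the default port):
+
+[source,sh]
+----
+sudo ./diagnostics.sh --type local --host 127.0.0.1 --port 9200
+----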
+ [discrete] [[diagnostic-tool-requirements]] === Requirements diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 1bdd93e3a7470..05fda8e0244de 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -16,7 +16,7 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" -forbiddenApis = "de.thetaphi:forbiddenapis:3.6" +forbiddenApis = "de.thetaphi:forbiddenapis:3.8" gradle-enterprise = "com.gradle:develocity-gradle-plugin:3.18.1" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 5e874b52fc4c6..3f56071f6f495 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1336,6 +1336,11 @@ + + + + + diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index f5fe8d41c2243..a3bbb611f3e68 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,19 +9,29 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; +import java.io.IOException; import java.lang.reflect.Method; -import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { + @Override - public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { + public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { return new InstrumenterImpl(classNameSuffix, instrumentationMethods); } @@ -33,9 +43,97 @@ public MethodKey methodKeyForTarget(Method targetMethod) { return new MethodKey( Type.getInternalName(targetMethod.getDeclaringClass()), targetMethod.getName(), - Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList(), - Modifier.isStatic(targetMethod.getModifiers()) + Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList() ); } + @Override + public Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, + IOException { + var methodsToInstrument = new HashMap(); + var checkerClass = Class.forName(entitlementCheckerClassName); + var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor 
visitMethod( + int access, + String checkerMethodName, + String checkerMethodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); + + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); + + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkerMethod = new CheckerMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); + + methodsToInstrument.put(methodToInstrument, checkerMethod); + + return mv; + } + }; + reader.accept(visitor, 0); + return methodsToInstrument; + } + + private static final Type CLASS_TYPE = Type.getType(Class.class); + + static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] checkerMethodArgumentTypes) { + var classNameStartIndex = checkerMethodName.indexOf('$'); + var classNameEndIndex = checkerMethodName.lastIndexOf('$'); + + if (classNameStartIndex == -1 || classNameStartIndex >= classNameEndIndex) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect name format. " + + "It should be either check$$methodName (instance) or check$package_ClassName$methodName (static)", + checkerMethodName + ) + ); + } + + // No "className" (check$$methodName) -> the target method is an instance method, and we'll get its class from the actual typed argument + final boolean targetMethodIsStatic = classNameStartIndex + 1 != classNameEndIndex; + final String targetMethodName = checkerMethodName.substring(classNameEndIndex + 1); + + final String targetClassName; + final List targetParameterTypes; + if (targetMethodIsStatic) { + if (checkerMethodArgumentTypes.length < 1 || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. " + "It must have a first argument of Class type.", + checkerMethodName + ) + ); + } + + targetClassName = checkerMethodName.substring(classNameStartIndex + 1, classNameEndIndex).replace('_', '/'); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(1).map(Type::getInternalName).toList(); + } else { + if (checkerMethodArgumentTypes.length < 2 + || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false + || checkerMethodArgumentTypes[1].getSort() != Type.OBJECT) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. 
" + + "It must have a first argument of Class type, and a second argument of the class containing the method to " + + "instrument", + checkerMethodName + ) + ); + } + var targetClassType = checkerMethodArgumentTypes[1]; + targetClassName = targetClassType.getInternalName(); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(2).map(Type::getInternalName).toList(); + } + return new MethodKey(targetClassName, targetMethodName, targetParameterTypes); + } } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 63c9ccd80be70..53e76372b107d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.objectweb.asm.AnnotationVisitor; @@ -23,7 +24,6 @@ import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Method; import java.util.Map; import java.util.stream.Stream; @@ -40,9 +40,9 @@ public class InstrumenterImpl implements Instrumenter { * To avoid class name collisions during testing without an agent to replace classes in-place. */ private final String classNameSuffix; - private final Map instrumentationMethods; + private final Map instrumentationMethods; - public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { + public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { this.classNameSuffix = classNameSuffix; this.instrumentationMethods = instrumentationMethods; } @@ -138,12 +138,7 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str var mv = super.visitMethod(access, name, descriptor, signature, exceptions); if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; - var key = new MethodKey( - className, - name, - Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), - isStatic - ); + var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = instrumentationMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); @@ -177,7 +172,7 @@ private void addClassAnnotationIfNeeded() { class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final String instrumentedMethodDescriptor; - private final Method instrumentationMethod; + private final CheckerMethod instrumentationMethod; private boolean hasCallerSensitiveAnnotation = false; EntitlementMethodVisitor( @@ -185,7 +180,7 @@ class EntitlementMethodVisitor extends MethodVisitor { MethodVisitor methodVisitor, boolean instrumentedMethodIsStatic, String instrumentedMethodDescriptor, - Method instrumentationMethod + CheckerMethod instrumentationMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; @@ -262,9 +257,12 @@ private void forwardIncomingArguments() { private void 
invokeInstrumentationMethod() { mv.visitMethodInsn( INVOKEINTERFACE, - Type.getInternalName(instrumentationMethod.getDeclaringClass()), - instrumentationMethod.getName(), - Type.getMethodDescriptor(instrumentationMethod), + instrumentationMethod.className(), + instrumentationMethod.methodName(), + Type.getMethodDescriptor( + Type.VOID_TYPE, + instrumentationMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new) + ), true ); } diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java new file mode 100644 index 0000000000000..c0ff5d59d3c72 --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.test.ESTestCase; +import org.objectweb.asm.Type; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +@ESTestCase.WithoutSecurityManager +public class InstrumentationServiceImplTests extends ESTestCase { + + final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); + + static class TestTargetClass {} + + interface TestChecker { + void check$org_example_TestTargetClass$staticMethod(Class clazz, int arg0, String arg1, Object arg2); + + void check$$instanceMethodNoArgs(Class clazz, TestTargetClass that); + + void check$$instanceMethodWithArgs(Class clazz, TestTargetClass that, int x, int y); + } + + interface TestCheckerOverloads { + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, int y); + + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, String y); + } + + public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); + + assertThat(methodsMap, aMapWithSize(3)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( 
+ equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodNoArgs", + List.of() + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodNoArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodWithArgs", + List.of("I", "I") + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodWithArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;", + "I", + "I" + ) + ) + ) + ) + ); + } + + public void testInstrumentationTargetLookupWithOverloads() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName()); + + assertThat(methodsMap, aMapWithSize(2)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "java/lang/String"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "I"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "I") + ) + ) + ) + ); + } + + public void testParseCheckerMethodSignatureStaticMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureStaticMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of("I", "java/lang/String")))); + } + + public void testParseCheckerMethodSignatureStaticMethodInnerClass() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$InnerClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass$InnerClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureIncorrectName() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$staticMethod", new Type[] { 
Type.getType(Class.class) }) + ); + + assertThat(exception.getMessage(), containsString("has incorrect name format")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$ClassName$staticMethod", new Type[] {}) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentType() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$ClassName$staticMethod", + new Type[] { Type.getType(String.class) } + ) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of() + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(String.class) }) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(Class.class) }) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes2() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType("I") } + ) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index 9a57e199d4907..e3f5539999be5 100644 --- 
a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -11,7 +11,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -22,11 +24,12 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; -import java.util.stream.Collectors; +import java.util.Map; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; import static org.objectweb.asm.Opcodes.INVOKESTATIC; /** @@ -53,7 +56,12 @@ public void initialize() { * Contains all the virtual methods from {@link ClassToInstrument}, * allowing this test to call them on the dynamically loaded instrumented class. */ - public interface Testable {} + public interface Testable { + // This method is here to demonstrate Instrumenter does not get confused by overloads + void someMethod(int arg); + + void someMethod(int arg, String anotherArg); + } /** * This is a placeholder for real class library methods. @@ -71,10 +79,26 @@ public static void systemExit(int status) { public static void anotherSystemExit(int status) { assertEquals(123, status); } + + public void someMethod(int arg) {} + + public void someMethod(int arg, String anotherArg) {} + + public static void someStaticMethod(int arg) {} + + public static void someStaticMethod(int arg, String anotherArg) {} } static final class TestException extends RuntimeException {} + public interface MockEntitlementChecker extends EntitlementChecker { + void checkSomeStaticMethod(Class clazz, int arg); + + void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); + + void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + } + /** * We're not testing the permission checking logic here; * only that the instrumented methods are calling the correct check methods with the correct arguments. @@ -82,7 +106,7 @@ static final class TestException extends RuntimeException {} * just to demonstrate that the injected bytecodes succeed in calling these methods. * It also asserts that the arguments are correct. */ - public static class TestEntitlementChecker implements EntitlementChecker { + public static class TestEntitlementChecker implements MockEntitlementChecker { /** * This allows us to test that the instrumentation is correct in both cases: * if the check throws, and if it doesn't. 
@@ -90,9 +114,12 @@ public static class TestEntitlementChecker implements EntitlementChecker { volatile boolean isActive; int checkSystemExitCallCount = 0; + int checkSomeStaticMethodIntCallCount = 0; + int checkSomeStaticMethodIntStringCallCount = 0; + int checkSomeInstanceMethodCallCount = 0; @Override - public void checkSystemExit(Class callerClass, int status) { + public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; assertSame(InstrumenterTests.class, callerClass); assertEquals(123, status); @@ -104,11 +131,48 @@ private void throwIfActive() { throw new TestException(); } } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg) { + checkSomeStaticMethodIntCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { + checkSomeStaticMethodIntStringCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + assertEquals("abc", anotherArg); + throwIfActive(); + } + + @Override + public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { + checkSomeInstanceMethodCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertThat( + that.getClass().getName(), + startsWith("org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$ClassToInstrument") + ); + assertEquals(123, arg); + assertEquals("def", anotherArg); + throwIfActive(); + } } public void testClassIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -117,7 +181,7 @@ public void testClassIsInstrumented() throws Exception { } Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW", + classToInstrument.getName() + "_NEW", newBytecode ); @@ -134,7 +198,14 @@ public void testClassIsInstrumented() throws Exception { public void testClassIsNotInstrumentedTwice() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -146,7 +217,7 @@ public void testClassIsNotInstrumentedTwice() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + 
"_NEW_NEW", instrumentedTwiceBytecode ); @@ -159,7 +230,16 @@ public void testClassIsNotInstrumentedTwice() throws Exception { public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit", "anotherSystemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod, + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("anotherSystemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -171,7 +251,7 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + "_NEW_NEW", instrumentedTwiceBytecode ); @@ -185,22 +265,78 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); } - /** This test doesn't replace ClassToInstrument in-place but instead loads a separate - * class ClassToInstrument_NEW that contains the instrumentation. Because of this, - * we need to configure the Transformer to use a MethodKey and instrumentationMethod - * with slightly different signatures (using the common interface Testable) which - * is not what would happen when it's run by the agent. - */ - private InstrumenterImpl createInstrumenter(Class classToInstrument, String... 
methodNames) throws NoSuchMethodException { - Method v1 = EntitlementChecker.class.getMethod("checkSystemExit", Class.class, int.class); - var methods = Arrays.stream(methodNames).map(name -> { - try { - return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); + public void testInstrumenterWorksWithOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); + + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount, is(1)); + } + + public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + Testable testTargetClass = (Testable) (newClass.getConstructor().newInstance()); + + // This overload is not instrumented, so it will not throw + testTargetClass.someMethod(123); + assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); + + assertThat(getTestEntitlementChecker().checkSomeInstanceMethodCallCount, is(1)); + } + + /** This test doesn't replace classToInstrument in-place but instead loads a separate + * class with the same class name plus a "_NEW" suffix (classToInstrument.getName() + "_NEW") + * that contains the instrumentation. 
Because of this, we need to configure the Transformer to use a + * MethodKey and instrumentationMethod with slightly different signatures (using the common interface + * Testable) which is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Map methods) throws NoSuchMethodException { Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); return new InstrumenterImpl("_NEW", methods) { /** @@ -220,13 +356,38 @@ protected void pushEntitlementChecker(MethodVisitor mv) { }; } + private static CheckerMethod getCheckerMethod(Class clazz, String methodName, Class... parameterTypes) + throws NoSuchMethodException { + var method = clazz.getMethod(methodName, parameterTypes); + return new CheckerMethod( + Type.getInternalName(clazz), + method.getName(), + Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() + ); + } + /** * Calling a static method of a dynamically loaded class is significantly more cumbersome * than calling a virtual method. */ - private static void callStaticMethod(Class c, String methodName, int status) throws NoSuchMethodException, IllegalAccessException { + private static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod(methodName, int.class).invoke(null, arg); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! + throw n; + } else { + throw new AssertionError(cause); + } + } + } + + private static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, + IllegalAccessException { try { - c.getMethod(methodName, int.class).invoke(null, status); + c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause instanceof TestException n) { diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 5ebb7d00e26f5..167c93c90df5c 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,5 +10,5 @@ package org.elasticsearch.entitlement.bridge; public interface EntitlementChecker { - void checkSystemExit(Class callerClass, int status); + void check$java_lang_System$exit(Class callerClass, int status); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 7f68457baea9e..01b8f4d574f90 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -15,6 +15,7 @@ import com.sun.tools.attach.VirtualMachine; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -22,15 +23,33 @@ import java.io.IOException; import java.nio.file.Files; import 
java.nio.file.Path; +import java.util.Collection; +import java.util.Objects; +import java.util.function.Function; public class EntitlementBootstrap { + public record BootstrapArgs(Collection> pluginData, Function, String> pluginResolver) {} + + private static BootstrapArgs bootstrapArgs; + + public static BootstrapArgs bootstrapArgs() { + return bootstrapArgs; + } + /** - * Activates entitlement checking. Once this method returns, calls to forbidden methods - * will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * Activates entitlement checking. Once this method returns, calls to methods protected by Entitlements from classes without a valid + * policy will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * @param pluginData a collection of (plugin path, boolean) pairs that holds the paths of all the installed Elasticsearch modules and + * plugins, and whether each is Java modular or not. + * @param pluginResolver a function to map a Java Class to the plugin it belongs to (the plugin name). */ - public static void bootstrap() { + public static void bootstrap(Collection> pluginData, Function, String> pluginResolver) { logger.debug("Loading entitlement agent"); + if (EntitlementBootstrap.bootstrapArgs != null) { + throw new IllegalStateException("plugin data is already set"); + } + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(Objects.requireNonNull(pluginData), Objects.requireNonNull(pluginResolver)); exportInitializationToAgent(); loadAgent(findAgentJar()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 155d5a27c606b..6d31abe4cf054 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,17 +9,35 @@ package org.elasticsearch.entitlement.initialization; +import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.provider.ProviderLocator; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.Policy; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; +import org.elasticsearch.entitlement.runtime.policy.PolicyParser; +import org.elasticsearch.entitlement.runtime.policy.Scope; +import java.io.IOException; import java.lang.instrument.Instrumentation; -import java.lang.reflect.Method; +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; /** * Called by the agent during {@code 
agentmain} to configure the entitlement system, @@ -29,6 +47,9 @@ * to begin injecting our instrumentation. */ public class EntitlementInitialization { + + private static final String POLICY_FILE_NAME = "entitlement-policy.yaml"; + private static ElasticsearchEntitlementChecker manager; // Note: referenced by bridge reflectively @@ -38,16 +59,82 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(); + manager = new ElasticsearchEntitlementChecker(createPolicyManager()); + + Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( + "org.elasticsearch.entitlement.bridge.EntitlementChecker" + ); + + var classesToTransform = methodMap.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); + + inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), classesToTransform), true); + // TODO: should we limit this array somehow? + var classesToRetransform = classesToTransform.stream().map(EntitlementInitialization::internalNameToClass).toArray(Class[]::new); + inst.retransformClasses(classesToRetransform); + } + + private static Class internalNameToClass(String internalName) { + try { + return Class.forName(internalName.replace('/', '.'), false, ClassLoader.getPlatformClassLoader()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + private static PolicyManager createPolicyManager() throws IOException { + Map pluginPolicies = createPluginPolicies(EntitlementBootstrap.bootstrapArgs().pluginData()); + + // TODO: What should the name be? + // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it + var serverPolicy = new Policy("server", List.of()); + return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); + } + + private static Map createPluginPolicies(Collection> pluginData) throws IOException { + Map pluginPolicies = new HashMap<>(pluginData.size()); + for (Tuple entry : pluginData) { + Path pluginRoot = entry.v1(); + boolean isModular = entry.v2(); + + String pluginName = pluginRoot.getFileName().toString(); + final Policy policy = loadPluginPolicy(pluginRoot, isModular, pluginName); + + pluginPolicies.put(pluginName, policy); + } + return pluginPolicies; + } + + private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName) throws IOException { + Path policyFile = pluginRoot.resolve(POLICY_FILE_NAME); + + final Set moduleNames = getModuleNames(pluginRoot, isModular); + final Policy policy = parsePolicyIfExists(pluginName, policyFile); + + // TODO: should this check actually be part of the parser? 
+ for (Scope scope : policy.scopes) { + if (moduleNames.contains(scope.name) == false) { + throw new IllegalStateException("policy [" + policyFile + "] contains invalid module [" + scope.name + "]"); + } + } + return policy; + } + + private static Policy parsePolicyIfExists(String pluginName, Path policyFile) throws IOException { + if (Files.exists(policyFile)) { + return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName).parsePolicy(); + } + return new Policy(pluginName, List.of()); + } - // TODO: Configure actual entitlement grants instead of this hardcoded one - Method targetMethod = System.class.getMethod("exit", int.class); - Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.bridge.EntitlementChecker") - .getMethod("checkSystemExit", Class.class, int.class); - Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod); + private static Set getModuleNames(Path pluginRoot, boolean isModular) { + if (isModular) { + ModuleFinder moduleFinder = ModuleFinder.of(pluginRoot); + Set moduleReferences = moduleFinder.findAll(); - inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true); - inst.retransformClasses(System.class); + return moduleReferences.stream().map(mr -> mr.descriptor().name()).collect(Collectors.toUnmodifiableSet()); + } + // When isModular == false we use the same "ALL-UNNAMED" constant as the JDK to indicate (any) unnamed module for this plugin + return Set.of(ALL_UNNAMED); } private static String internalName(Class c) { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java new file mode 100644 index 0000000000000..c20a75a61a608 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation; + +import java.util.List; + +/** + * A structure to use as a representation of the checker method the instrumentation will inject. + * + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the checker method name + * @param parameterDescriptors a list of type descriptors + * for methodName parameters. 
+ */ +public record CheckerMethod(String className, String methodName, List parameterDescriptors) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 25fa84ec7c4ba..12316bfb043c5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation; +import java.io.IOException; import java.lang.reflect.Method; import java.util.Map; @@ -16,10 +17,12 @@ * The SPI service entry point for instrumentation. */ public interface InstrumentationService { - Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); + Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); /** * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline */ MethodKey methodKeyForTarget(Method targetMethod); + + Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java index 54e09c10bcc57..256a4d709d9dc 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java @@ -12,7 +12,10 @@ import java.util.List; /** + * A structure to use as a key/lookup for a method target of instrumentation * - * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the method name + * @param parameterTypes a list of "internal names" for the parameter types */ -public record MethodKey(String className, String methodName, List parameterTypes, boolean isStatic) {} +public record MethodKey(String className, String methodName, List parameterTypes) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 6d5dbd4098aa9..790416ca5659a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,10 +10,8 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; - -import java.util.Optional; +import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; /** * Implementation of the {@link EntitlementChecker} interface, providing additional @@ -21,51 +19,14 @@ * The trampoline module loads this object via SPI. 
*/ public class ElasticsearchEntitlementChecker implements EntitlementChecker { - private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + private final PolicyManager policyManager; - @Override - public void checkSystemExit(Class callerClass, int status) { - var requestingModule = requestingModule(callerClass); - if (isTriviallyAllowed(requestingModule)) { - return; - } - // Hard-forbidden until we develop the permission granting scheme - throw new NotEntitledException("Missing entitlement for " + requestingModule); + public ElasticsearchEntitlementChecker(PolicyManager policyManager) { + this.policyManager = policyManager; } - private static Module requestingModule(Class callerClass) { - if (callerClass != null) { - Module callerModule = callerClass.getModule(); - if (callerModule.getLayer() != ModuleLayer.boot()) { - // fast path - return callerModule; - } - } - int framesToSkip = 1 // getCallingClass (this method) - + 1 // the checkXxx method - + 1 // the runtime config method - + 1 // the instrumented method - ; - Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) - .walk( - s -> s.skip(framesToSkip) - .map(f -> f.getDeclaringClass().getModule()) - .filter(m -> m.getLayer() != ModuleLayer.boot()) - .findFirst() - ); - return module.orElse(null); - } - - private static boolean isTriviallyAllowed(Module requestingModule) { - if (requestingModule == null) { - logger.debug("Trivially allowed: Entire call stack is in the boot module layer"); - return true; - } - if (requestingModule == System.class.getModule()) { - logger.debug("Trivially allowed: Caller is in {}", System.class.getModule().getName()); - return true; - } - logger.trace("Not trivially allowed"); - return false; + @Override + public void check$java_lang_System$exit(Class callerClass, int status) { + policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java similarity index 57% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java index f7ee88c715dfa..60490baf41a10 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java @@ -7,15 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.gradle.internal.test; - -/** - * Any object that can produce an accompanying stop task, meant to tear down - * a previously instantiated service. - */ -public interface Fixture { - - /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. 
*/ - Object getStopTask(); +package org.elasticsearch.entitlement.runtime.policy; +public enum FlagEntitlementType { + SYSTEM_EXIT; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java new file mode 100644 index 0000000000000..c06dc09758de5 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.Strings; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class PolicyManager { + private static final Logger logger = LogManager.getLogger(PolicyManager.class); + + protected final Policy serverPolicy; + protected final Map<String, Policy> pluginPolicies; + private final Function<Class<?>, String> pluginResolver; + + public static final String ALL_UNNAMED = "ALL-UNNAMED"; + + private static final Set<Module> systemModules = findSystemModules(); + + private static Set<Module> findSystemModules() { + var systemModulesDescriptors = ModuleFinder.ofSystem() + .findAll() + .stream() + .map(ModuleReference::descriptor) + .collect(Collectors.toUnmodifiableSet()); + + return ModuleLayer.boot() + .modules() + .stream() + .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) + .collect(Collectors.toUnmodifiableSet()); + } + + public PolicyManager(Policy defaultPolicy, Map<String, Policy> pluginPolicies, Function<Class<?>, String> pluginResolver) { + this.serverPolicy = Objects.requireNonNull(defaultPolicy); + this.pluginPolicies = Collections.unmodifiableMap(Objects.requireNonNull(pluginPolicies)); + this.pluginResolver = pluginResolver; + } + + public void checkFlagEntitlement(Class<?> callerClass, FlagEntitlementType type) { + var requestingModule = requestingModule(callerClass); + if (isTriviallyAllowed(requestingModule)) { + return; + } + + // TODO: real policy check. For now, we only allow our hardcoded System.exit policy for server.
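+ // The hardcoded rule below grants exactly one entitlement: code in the named org.elasticsearch.server module may call System.exit. Every other caller falls through to the NotEntitledException at the bottom of this method, so even a plugin whose policy in pluginPolicies would grant SYSTEM_EXIT is rejected for now.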
+ // TODO: this will be checked using policies + if (requestingModule.isNamed() + && requestingModule.getName().equals("org.elasticsearch.server") + && type == FlagEntitlementType.SYSTEM_EXIT) { + logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type); + return; + } + + // TODO: plugins policy check using pluginResolver and pluginPolicies + throw new NotEntitledException( + Strings.format("Missing entitlement [%s] for caller [%s] in module [%s]", type, callerClass, requestingModule.getName()) + ); + } + + private static Module requestingModule(Class<?> callerClass) { + if (callerClass != null) { + Module callerModule = callerClass.getModule(); + if (systemModules.contains(callerModule) == false) { + // fast path + return callerModule; + } + } + int framesToSkip = 1 // getCallingClass (this method) + + 1 // the checkXxx method + + 1 // the runtime config method + + 1 // the instrumented method + ; + Optional<Module> module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) + .walk( + s -> s.skip(framesToSkip) + .map(f -> f.getDeclaringClass().getModule()) + .filter(m -> systemModules.contains(m) == false) + .findFirst() + ); + return module.orElse(null); + } + + private static boolean isTriviallyAllowed(Module requestingModule) { + if (requestingModule == null) { + logger.debug("Trivially allowed: entire call stack is composed of classes in system modules"); + return true; + } + logger.trace("Not trivially allowed"); + return false; + } + + @Override + public String toString() { + return "PolicyManager{" + "serverPolicy=" + serverPolicy + ", pluginPolicies=" + pluginPolicies + '}'; + } +} diff --git a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java index e146b06fe3f53..cb90d10665659 100644 --- a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java +++ b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/ingest/PipelineConfigurationBridge.java @@ -28,8 +28,12 @@ public String getId() { return delegate.getId(); } - public Map<String, Object> getConfigAsMap() { - return delegate.getConfigAsMap(); + public Map<String, Object> getConfig() { + return delegate.getConfig(); + } + + public Map<String, Object> getConfig(final boolean unmodifiable) { + return delegate.getConfig(unmodifiable); } @Override diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index d65622280ee9e..94fdddf6d711a 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -18,7 +18,7 @@ esplugin { restResources { restApi { - include '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script' + include 'capabilities', '_common', 'indices', 'cluster', 'index', 'search', 'nodes', 'bulk', 'scripts_painless_execute', 'put_script' } restTests { // Pulls in all aggregation tests from core AND the forwards v7's core for forwards compatibility diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 2b4fea0327e86..203105edc5a24 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++
b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -177,65 +178,66 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { // Buckets are ordered into groups - [keyed filters] [key1&key2 intersects] - int maxOrd = owningBucketOrds.length * totalNumKeys; - int totalBucketsToBuild = 0; - for (int ord = 0; ord < maxOrd; ord++) { + long maxOrd = owningBucketOrds.size() * totalNumKeys; + long totalBucketsToBuild = 0; + for (long ord = 0; ord < maxOrd; ord++) { if (bucketDocCount(ord) > 0) { totalBucketsToBuild++; } } - long[] bucketOrdsToBuild = new long[totalBucketsToBuild]; - int builtBucketIndex = 0; - for (int ord = 0; ord < maxOrd; ord++) { - if (bucketDocCount(ord) > 0) { - bucketOrdsToBuild[builtBucketIndex++] = ord; - } - } - assert builtBucketIndex == totalBucketsToBuild; - builtBucketIndex = 0; - var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < owningBucketOrds.length; owningBucketOrdIdx++) { - List<InternalAdjacencyMatrix.InternalBucket> buckets = new ArrayList<>(filters.length); - for (int i = 0; i < keys.length; i++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], i); - long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned because this aggregation will commonly be used under a - // a date-histogram where we will look for transactions over time and can expect many - // empty buckets.
- if (docCount > 0) { - InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - keys[i], - docCount, - bucketSubAggs.apply(builtBucketIndex++) - ); - buckets.add(bucket); + try (LongArray bucketOrdsToBuild = bigArrays().newLongArray(totalBucketsToBuild)) { + int[] builtBucketIndex = new int[] { 0 }; + for (int ord = 0; ord < maxOrd; ord++) { + if (bucketDocCount(ord) > 0) { + bucketOrdsToBuild.set(builtBucketIndex[0]++, ord); } } - int pos = keys.length; - for (int i = 0; i < keys.length; i++) { - for (int j = i + 1; j < keys.length; j++) { - long bucketOrd = bucketOrd(owningBucketOrds[owningBucketOrdIdx], pos); + assert builtBucketIndex[0] == totalBucketsToBuild; + builtBucketIndex[0] = 0; + var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); + InternalAggregation[] aggregations = buildAggregations(Math.toIntExact(owningBucketOrds.size()), owningBucketOrdIdx -> { + List<InternalAdjacencyMatrix.InternalBucket> buckets = new ArrayList<>(filters.length); + for (int i = 0; i < keys.length; i++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), i); long docCount = bucketDocCount(bucketOrd); - // Empty buckets are not returned due to potential for very sparse matrices + // Empty buckets are not returned because this aggregation will commonly be used under + // a date-histogram where we will look for transactions over time and can expect many + // empty buckets. if (docCount > 0) { - String intersectKey = keys[i] + separator + keys[j]; + checkRealMemoryCBForInternalBucket(); InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( - intersectKey, + keys[i], docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } - pos++; } - } - results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); + int pos = keys.length; + for (int i = 0; i < keys.length; i++) { + for (int j = i + 1; j < keys.length; j++) { + long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), pos); + long docCount = bucketDocCount(bucketOrd); + // Empty buckets are not returned due to potential for very sparse matrices + if (docCount > 0) { + String intersectKey = keys[i] + separator + keys[j]; + InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( + intersectKey, + docCount, + bucketSubAggs.apply(builtBucketIndex[0]++) + ); + buckets.add(bucket); + } + pos++; + } + } + return new InternalAdjacencyMatrix(name, buckets, metadata()); + }); + assert builtBucketIndex[0] == totalBucketsToBuild; + return aggregations; } - assert builtBucketIndex == totalBucketsToBuild; - return results; } @Override diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index d4e1c2928c441..6add1b0ac4a13 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -141,7 +141,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag protected final InternalAggregation[] buildAggregations( LongKeyedBucketOrds bucketOrds, LongToIntFunction roundingIndexFor, - long[] owningBucketOrds + LongArray owningBucketOrds ) throws IOException { return
buildAggregationsForVariableBuckets( owningBucketOrds, @@ -324,7 +324,7 @@ private void increaseRoundingIfNeeded(long rounded) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregations(bucketOrds, l -> roundingIdx, owningBucketOrds); } @@ -594,7 +594,7 @@ private void rebucket() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Rebucket before building the aggregation to build as small as result * as possible. diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index c74637330dd7a..369ae4590fe97 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -11,6 +11,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.RoutingPathFields; @@ -30,6 +32,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; @@ -67,42 +70,40 @@ public TimeSeriesAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { BytesRef spare = new BytesRef(); - InternalTimeSeries.InternalBucket[][] allBucketsPerOrd = new InternalTimeSeries.InternalBucket[owningBucketOrds.length][]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - List<InternalTimeSeries.InternalBucket> buckets = new ArrayList<>(); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - ordsEnum.readValue(spare); - InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( - BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. - docCount, - null, - keyed - ); - bucket.bucketOrd = ordsEnum.ord(); - buckets.add(bucket); - if (buckets.size() >= size) { - break; + try (ObjectArray<InternalTimeSeries.InternalBucket[]> allBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < allBucketsPerOrd.size(); ordIdx++) { + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + List<InternalTimeSeries.InternalBucket> buckets = new ArrayList<>(); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + ordsEnum.readValue(spare); + checkRealMemoryCBForInternalBucket(); + InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( + BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here.
+ docCount, + null, + keyed + ); + bucket.bucketOrd = ordsEnum.ord(); + buckets.add(bucket); + if (buckets.size() >= size) { + break; + } } + // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). + // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value + // changes not matching _tsid hash changes. Changes in _tsid hash are handled by creating a new bucket as a result of making + // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is no longer true + // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). + buckets.sort(Comparator.comparing(bucket -> bucket.key)); + allBucketsPerOrd.set(ordIdx, buckets.toArray(new InternalTimeSeries.InternalBucket[0])); } - // NOTE: after introducing _tsid hashing time series are sorted by (_tsid hash, @timestamp) instead of (_tsid, timestamp). - // _tsid hash and _tsid might sort differently, and out of order data might result in incorrect buckets due to _tsid value - // changes not matching _tsid hash changes. Changes in _tsid hash are handled creating a new bucket as a result of making - // the assumption that sorting data results in new buckets whenever there is a change in _tsid hash. This is no true anymore - // because we collect data sorted on (_tsid hash, timestamp) but build aggregation results sorted by (_tsid, timestamp). - buckets.sort(Comparator.comparing(bucket -> bucket.key)); - allBucketsPerOrd[ordIdx] = buckets.toArray(new InternalTimeSeries.InternalBucket[0]); - } - buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(allBucketsPerOrd[ordIdx]); + return buildAggregations(Math.toIntExact(allBucketsPerOrd.size()), ordIdx -> buildResult(allBucketsPerOrd.get(ordIdx))); } - return result; } @Override @@ -185,7 +186,7 @@ public void collect(int doc, long bucket) throws IOException { } InternalTimeSeries buildResult(InternalTimeSeries.InternalBucket[] topBuckets) { - return new InternalTimeSeries(name, List.of(topBuckets), keyed, metadata()); + return new InternalTimeSeries(name, Arrays.asList(topBuckets), keyed, metadata()); } @FunctionalInterface diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml index 5b2c2dc379cb9..4d8efe2a6f9d8 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/random_sampler.yml @@ -142,6 +142,66 @@ setup: } - match: { aggregations.sampled.mean.value: 1.0 } --- +"Test random_sampler aggregation with scored subagg": + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ random_sampler_with_scored_subaggs ] + test_runner_features: capabilities + reason: "Support for random sampler with scored subaggs capability required" + - do: + search: + index: data + size: 0 + body: > + { + "query": { + "function_score": { + "random_score": {} }
+ }, + "aggs": { + "sampled": { + "random_sampler": { + "probability": 0.5 + }, + "aggs": { + "top": { + "top_hits": {} + } + } + } + } + } + - is_true: aggregations.sampled.top.hits + - do: + search: + index: data + size: 0 + body: > + { + "query": { + "function_score": { + "random_score": {} + } + }, + "aggs": { + "sampled": { + "random_sampler": { + "probability": 1.0 + }, + "aggs": { + "top": { + "top_hits": {} + } + } + } + } + } + - match: { aggregations.sampled.top.hits.total.value: 6 } + - is_true: aggregations.sampled.top.hits.hits.0._score +--- "Test random_sampler aggregation with poor settings": - requires: cluster_features: ["gte_v8.2.0"] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java new file mode 100644 index 0000000000000..fdc96892d4b27 --- /dev/null +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class ReindexDataStreamTransportActionIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return
List.of(DataStreamsPlugin.class); + } + + public void testNonExistentDataStream() { + String nonExistentDataStreamName = randomAlphaOfLength(50); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(nonExistentDataStreamName); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME), reindexDataStreamRequest) + .actionGet() + ); + } + + public void testAlreadyUpToDateDataStream() throws Exception { + String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(dataStreamName); + createDataStream(dataStreamName); + ReindexDataStreamResponse response = client().execute( + new ActionType<ReindexDataStreamResponse>(ReindexDataStreamAction.NAME), + reindexDataStreamRequest + ).actionGet(); + String persistentTaskId = response.getTaskId(); + assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName)); + AtomicReference<ReindexDataStreamTask> runningTask = new AtomicReference<>(); + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + TaskManager taskManager = transportService.getTaskManager(); + Map<Long, CancellableTask> tasksMap = taskManager.getCancellableTasks(); + Optional<Map.Entry<Long, CancellableTask>> optionalTask = taskManager.getCancellableTasks() + .entrySet() + .stream() + .filter(entry -> entry.getValue().getType().equals("persistent")) + .filter( + entry -> entry.getValue() instanceof ReindexDataStreamTask + && persistentTaskId.equals((((ReindexDataStreamTask) entry.getValue()).getPersistentTaskId())) + ) + .findAny(); + optionalTask.ifPresent( + longCancellableTaskEntry -> runningTask.compareAndSet(null, (ReindexDataStreamTask) longCancellableTaskEntry.getValue()) + ); + } + ReindexDataStreamTask task = runningTask.get(); + assertNotNull(task); + assertThat(task.getStatus().complete(), equalTo(true)); + assertNull(task.getStatus().exception()); + assertThat(task.getStatus().pending(), equalTo(0)); + assertThat(task.getStatus().inProgress(), equalTo(0)); + assertThat(task.getStatus().errors().size(), equalTo(0)); + } + + private void createDataStream(String dataStreamName) { + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request("my-template"); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .template(Template.builder().build()) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = safeGet( + client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest) + ); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + dataStreamName + ); + final AcknowledgedResponse createDataStreamResponse = safeGet( + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + ); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + } + + private void indexDocs(String dataStreamName) { +
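// Sends one asynchronous index request per document and blocks on the latch until every request has been acknowledged; any individual failure fails the test immediately from its listener. +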
int docs = randomIntBetween(5, 10); + CountDownLatch countDownLatch = new CountDownLatch(docs); + for (int i = 0; i < docs; i++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = "{ \"@timestamp\": \"2099-05-06T16:21:15.000Z\", \"message\": \"something cool happened\" }"; + indexRequest.source(doc, XContentType.JSON); + client().index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(DocWriteResponse docWriteResponse) { + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail("Indexing request should have succeeded eventually, failed with " + e.getMessage()); + } + }); + } + safeAwait(countDownLatch); + } + +} diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java index 01c63be448e62..2dc6fd84fdfe7 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/AbstractDataStreamIT.java @@ -45,6 +45,7 @@ public abstract class AbstractDataStreamIT extends ESRestTestCase { // tests such as testIgnoreDynamicBeyondLimit. .setting("xpack.apm_data.enabled", "false") .setting("xpack.otel_data.registry.enabled", "false") + .setting("cluster.logsdb.enabled", "false") .build(); protected RestClient client; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cb7445705537a..2f3b63d27ca35 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -19,19 +19,23 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.action.CreateDataStreamTransportAction; @@ -40,6 +44,7 @@ import org.elasticsearch.datastreams.action.MigrateToDataStreamTransportAction; import 
org.elasticsearch.datastreams.action.ModifyDataStreamsTransportAction; import org.elasticsearch.datastreams.action.PromoteDataStreamTransportAction; +import org.elasticsearch.datastreams.action.ReindexDataStreamTransportAction; import org.elasticsearch.datastreams.action.TransportGetDataStreamsAction; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; @@ -73,14 +78,27 @@ import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction; import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction; import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskExecutor; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskState; +import org.elasticsearch.datastreams.task.ReindexDataStreamStatus; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.time.Clock; @@ -93,7 +111,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; -public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin { +public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin, PersistentTaskPlugin { public static final Setting<TimeValue> TIME_SERIES_POLL_INTERVAL = Setting.timeSetting( "time_series.poll_interval", @@ -244,6 +262,7 @@ public Collection<?> createComponents(PluginServices services) { actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); } + actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); return actions; } @@ -302,4 +321,48 @@ public void close() throws IOException { public Collection<HealthIndicatorService> getHealthIndicatorServices() { return List.of(dataStreamLifecycleHealthIndicatorService.get()); } + + @Override + public List<NamedXContentRegistry.Entry> getNamedXContent() { + return List.of( + new NamedXContentRegistry.Entry( + PersistentTaskState.class, + new ParseField(ReindexDataStreamPersistentTaskState.NAME), + ReindexDataStreamPersistentTaskState::fromXContent + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new ParseField(ReindexDataStreamTaskParams.NAME), + ReindexDataStreamTaskParams::fromXContent + ) + ); + } + + @Override + public List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry( + PersistentTaskState.class, +
ReindexDataStreamPersistentTaskState.NAME, + ReindexDataStreamPersistentTaskState::new + ), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + ReindexDataStreamTaskParams.NAME, + ReindexDataStreamTaskParams::new + ), + new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) + ); + } + + @Override + public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { + return List.of(new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool)); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java new file mode 100644 index 0000000000000..0a86985c6c7b2 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/* + * This transport action creates a new persistent task for reindexing the source data stream given in the request. On successful creation + * of the persistent task, it responds with the persistent task id so that the user can monitor the persistent task.
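+ * The persistent task id is derived from the source data stream name ("reindex-data-stream-" plus the name), so at most one reindex task can exist per data stream; starting a second one while the first still exists is expected to fail with a ResourceAlreadyExistsException.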
+ */ +public class ReindexDataStreamTransportAction extends HandledTransportAction<ReindexDataStreamRequest, ReindexDataStreamResponse> { + private final PersistentTasksService persistentTasksService; + private final TransportService transportService; + private final ClusterService clusterService; + + @Inject + public ReindexDataStreamTransportAction( + TransportService transportService, + ActionFilters actionFilters, + PersistentTasksService persistentTasksService, + ClusterService clusterService + ) { + super( + ReindexDataStreamAction.NAME, + true, + transportService, + actionFilters, + ReindexDataStreamRequest::new, + transportService.getThreadPool().executor(ThreadPool.Names.GENERIC) + ); + this.transportService = transportService; + this.persistentTasksService = persistentTasksService; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener<ReindexDataStreamResponse> listener) { + String sourceDataStreamName = request.getSourceDataStream(); + Metadata metadata = clusterService.state().metadata(); + DataStream dataStream = metadata.dataStreams().get(sourceDataStreamName); + if (dataStream == null) { + listener.onFailure(new ResourceNotFoundException("Data stream named [{}] does not exist", sourceDataStreamName)); + return; + } + int totalIndices = dataStream.getIndices().size(); + int totalIndicesToBeUpgraded = (int) dataStream.getIndices() + .stream() + .filter(index -> metadata.index(index).getCreationVersion().isLegacyIndexVersion()) + .count(); + ReindexDataStreamTaskParams params = new ReindexDataStreamTaskParams( + sourceDataStreamName, + transportService.getThreadPool().absoluteTimeInMillis(), + totalIndices, + totalIndicesToBeUpgraded + ); + String persistentTaskId = getPersistentTaskId(sourceDataStreamName); + persistentTasksService.sendStartRequest( + persistentTaskId, + ReindexDataStreamTask.TASK_NAME, + params, + null, + ActionListener.wrap(startedTask -> listener.onResponse(new ReindexDataStreamResponse(persistentTaskId)), listener::onFailure) + ); + } + + private String getPersistentTaskId(String dataStreamName) throws ResourceAlreadyExistsException { + return "reindex-data-stream-" + dataStreamName; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java new file mode 100644 index 0000000000000..f10d2e7b356fb --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExecutor<ReindexDataStreamTaskParams> { + private static final TimeValue TASK_KEEP_ALIVE_TIME = TimeValue.timeValueDays(1); + private final Client client; + private final ClusterService clusterService; + private final ThreadPool threadPool; + + public ReindexDataStreamPersistentTaskExecutor(Client client, ClusterService clusterService, String taskName, ThreadPool threadPool) { + super(taskName, threadPool.generic()); + this.client = client; + this.clusterService = clusterService; + this.threadPool = threadPool; + } + + @Override + protected ReindexDataStreamTask createTask( + long id, + String type, + String action, + TaskId parentTaskId, + PersistentTasksCustomMetadata.PersistentTask<ReindexDataStreamTaskParams> taskInProgress, + Map<String, String> headers + ) { + ReindexDataStreamTaskParams params = taskInProgress.getParams(); + return new ReindexDataStreamTask( + params.startTime(), + params.totalIndices(), + params.totalIndicesToBeUpgraded(), + threadPool, + id, + type, + action, + "id=" + taskInProgress.getId(), + parentTaskId, + headers + ); + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, ReindexDataStreamTaskParams params, PersistentTaskState state) { + String sourceDataStream = params.getSourceDataStream(); + GetDataStreamAction.Request request = new GetDataStreamAction.Request(TimeValue.MAX_VALUE, new String[] { sourceDataStream }); + assert task instanceof ReindexDataStreamTask; + final ReindexDataStreamTask reindexDataStreamTask = (ReindexDataStreamTask) task; + client.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> { + List<GetDataStreamAction.Response.DataStreamInfo> dataStreamInfos = response.getDataStreams(); + if (dataStreamInfos.size() == 1) { + List<Index> indices = dataStreamInfos.getFirst().getDataStream().getIndices(); + List<Index> indicesToBeReindexed = indices.stream() + .filter(index -> clusterService.state().getMetadata().index(index).getCreationVersion().isLegacyIndexVersion()) + .toList(); + reindexDataStreamTask.setPendingIndices(indicesToBeReindexed.stream().map(Index::getName).toList()); + for (Index index : indicesToBeReindexed) { + // TODO This is just a placeholder.
This is where the real data stream reindex logic will go + } + + completeSuccessfulPersistentTask(reindexDataStreamTask); + } else { + completeFailedPersistentTask(reindexDataStreamTask, new ElasticsearchException("data stream does not exist")); + } + }, reindexDataStreamTask::markAsFailed)); + } + + private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { + persistentTask.reindexSucceeded(); + threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic()); + } + + private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) { + persistentTask.reindexFailed(e); + threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic()); + } + + private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) { + PersistentTasksCustomMetadata persistentTasksCustomMetadata = clusterService.state() + .getMetadata() + .custom(PersistentTasksCustomMetadata.TYPE); + PersistentTasksCustomMetadata.PersistentTask<?> persistentTask = persistentTasksCustomMetadata.getTask( + reindexDataStreamTask.getPersistentTaskId() + ); + PersistentTaskState state = persistentTask.getState(); + final long completionTime; + if (state == null) { + completionTime = threadPool.absoluteTimeInMillis(); + reindexDataStreamTask.updatePersistentTaskState( + new ReindexDataStreamPersistentTaskState(completionTime), + ActionListener.noop() + ); + } else { + completionTime = ((ReindexDataStreamPersistentTaskState) state).completionTime(); + } + return TimeValue.timeValueMillis(TASK_KEEP_ALIVE_TIME.millis() - (threadPool.absoluteTimeInMillis() - completionTime)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java new file mode 100644 index 0000000000000..d6f32a3d34a7a --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamPersistentTaskState(long completionTime) implements Task.Status, PersistentTaskState { + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String COMPLETION_TIME_FIELD = "completion_time"; + private static final ConstructingObjectParser<ReindexDataStreamPersistentTaskState, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamPersistentTaskState((long) args[0]) + ); + static { + PARSER.declareLong(constructorArg(), new ParseField(COMPLETION_TIME_FIELD)); + } + + public ReindexDataStreamPersistentTaskState(StreamInput in) throws IOException { + this(in.readLong()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(completionTime); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COMPLETION_TIME_FIELD, completionTime); + builder.endObject(); + return builder; + } + + public static ReindexDataStreamPersistentTaskState fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java new file mode 100644 index 0000000000000..10dfded853a13 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record ReindexDataStreamStatus( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + boolean complete, + Exception exception, + int inProgress, + int pending, + List<Tuple<String, Exception>> errors +) implements Task.Status { + public ReindexDataStreamStatus { + Objects.requireNonNull(errors); + } + + public static final String NAME = "ReindexDataStreamStatus"; + + public ReindexDataStreamStatus(StreamInput in) throws IOException { + this( + in.readLong(), + in.readInt(), + in.readInt(), + in.readBoolean(), + in.readException(), + in.readInt(), + in.readInt(), + in.readCollectionAsList(in1 -> Tuple.tuple(in1.readString(), in1.readException())) + ); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(persistentTaskStartTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + out.writeBoolean(complete); + out.writeException(exception); + out.writeInt(inProgress); + out.writeInt(pending); + out.writeCollection(errors, (out1, tuple) -> { + out1.writeString(tuple.v1()); + out1.writeException(tuple.v2()); + }); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("start_time", persistentTaskStartTime); + builder.field("complete", complete); + builder.field("total_indices", totalIndices); + builder.field("total_indices_requiring_upgrade", totalIndicesToBeUpgraded); + builder.field("successes", totalIndicesToBeUpgraded - (inProgress + pending + errors.size())); + builder.field("in_progress", inProgress); + builder.field("pending", pending); + builder.startArray("errors"); + for (Tuple<String, Exception> error : errors) { + builder.startObject(); + builder.field("index", error.v1()); + builder.field("message", error.v2().getMessage()); + builder.endObject(); + } + builder.endArray(); + if (exception != null) { + builder.field("exception", exception.getMessage()); + } + builder.endObject(); + return builder; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java new file mode 100644 index 0000000000000..2ae244679659f --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.core.Tuple; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamTask extends AllocatedPersistentTask { + public static final String TASK_NAME = "reindex-data-stream"; + private final long persistentTaskStartTime; + private final int totalIndices; + private final int totalIndicesToBeUpgraded; + private final ThreadPool threadPool; + private boolean complete = false; + private Exception exception; + private List<String> inProgress = new ArrayList<>(); + private List<String> pending = List.of(); + private List<Tuple<String, Exception>> errors = new ArrayList<>(); + + public ReindexDataStreamTask( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + ThreadPool threadPool, + long id, + String type, + String action, + String description, + TaskId parentTask, + Map<String, String> headers + ) { + super(id, type, action, description, parentTask, headers); + this.persistentTaskStartTime = persistentTaskStartTime; + this.totalIndices = totalIndices; + this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded; + this.threadPool = threadPool; + } + + @Override + public ReindexDataStreamStatus getStatus() { + return new ReindexDataStreamStatus( + persistentTaskStartTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress.size(), + pending.size(), + errors + ); + } + + public void reindexSucceeded() { + this.complete = true; + } + + public void reindexFailed(Exception e) { + this.complete = true; + this.exception = e; + } + + public void setInProgressIndices(List<String> inProgressIndices) { + this.inProgress = inProgressIndices; + } + + public void setPendingIndices(List<String> pendingIndices) { + this.pending = pendingIndices; + } + + public void addErrorIndex(String index, Exception error) { + this.errors.add(Tuple.tuple(index, error)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java new file mode 100644 index 0000000000000..5efbc6b672216 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamTaskParams(String sourceDataStream, long startTime, int totalIndices, int totalIndicesToBeUpgraded) + implements + PersistentTaskParams { + + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String SOURCE_DATA_STREAM_FIELD = "source_data_stream"; + private static final String START_TIME_FIELD = "start_time"; + private static final String TOTAL_INDICES_FIELD = "total_indices"; + private static final String TOTAL_INDICES_TO_BE_UPGRADED_FIELD = "total_indices_to_be_upgraded"; + private static final ConstructingObjectParser<ReindexDataStreamTaskParams, Void> PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamTaskParams((String) args[0], (long) args[1], (int) args[2], (int) args[3]) + ); + static { + PARSER.declareString(constructorArg(), new ParseField(SOURCE_DATA_STREAM_FIELD)); + PARSER.declareLong(constructorArg(), new ParseField(START_TIME_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_TO_BE_UPGRADED_FIELD)); + } + + public ReindexDataStreamTaskParams(StreamInput in) throws IOException { + this(in.readString(), in.readLong(), in.readInt(), in.readInt()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REINDEX_DATA_STREAMS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(sourceDataStream); + out.writeLong(startTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field(SOURCE_DATA_STREAM_FIELD, sourceDataStream) + .field(START_TIME_FIELD, startTime) + .field(TOTAL_INDICES_FIELD, totalIndices) + .field(TOTAL_INDICES_TO_BE_UPGRADED_FIELD, totalIndicesToBeUpgraded) + .endObject(); + } + + public String getSourceDataStream() { + return sourceDataStream; + } + + public static ReindexDataStreamTaskParams fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java new file mode 100644 index 0000000000000..be11bff131909 --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class ReindexDataStreamPersistentTaskStateTests extends AbstractXContentSerializingTestCase { + @Override + protected ReindexDataStreamPersistentTaskState doParseInstance(XContentParser parser) throws IOException { + return ReindexDataStreamPersistentTaskState.fromXContent(parser); + } + + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamPersistentTaskState::new; + } + + @Override + protected ReindexDataStreamPersistentTaskState createTestInstance() { + return new ReindexDataStreamPersistentTaskState(randomNegativeLong()); + } + + @Override + protected ReindexDataStreamPersistentTaskState mutateInstance(ReindexDataStreamPersistentTaskState instance) throws IOException { + return new ReindexDataStreamPersistentTaskState(instance.completionTime() + 1); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java new file mode 100644 index 0000000000000..8f0fabc2ce7ee --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static java.util.Map.entry; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamStatusTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamStatus::new; + } + + @Override + protected ReindexDataStreamStatus createTestInstance() { + return new ReindexDataStreamStatus( + randomLong(), + randomNegativeInt(), + randomNegativeInt(), + randomBoolean(), + nullableTestException(), + randomNegativeInt(), + randomNegativeInt(), + randomErrorList() + ); + } + + private Exception nullableTestException() { + if (randomBoolean()) { + return testException(); + } + return null; + } + + private Exception testException() { + /* + * Unfortunately ElasticsearchException doesn't have an equals and just falls back to Object::equals. So we can't test for equality + * when we're using an exception. So always just use null. + */ + return null; + } + + private List randomList() { + return randomList(0); + } + + private List randomList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50)); + } + + private List> randomErrorList() { + return randomErrorList(0); + } + + private List> randomErrorList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> Tuple.tuple(randomAlphaOfLength(30), testException())); + } + + @Override + protected ReindexDataStreamStatus mutateInstance(ReindexDataStreamStatus instance) throws IOException { + long startTime = instance.persistentTaskStartTime(); + int totalIndices = instance.totalIndices(); + int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded(); + boolean complete = instance.complete(); + Exception exception = instance.exception(); + int inProgress = instance.inProgress(); + int pending = instance.pending(); + List> errors = instance.errors(); + switch (randomIntBetween(0, 6)) { + case 0 -> startTime = randomLong(); + case 1 -> totalIndices = totalIndices + 1; + case 2 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1; + case 3 -> complete = complete == false; + case 4 -> inProgress = inProgress + 1; + case 5 -> pending = pending + 1; + case 6 -> errors = randomErrorList(errors.size() + 1); + default -> throw new UnsupportedOperationException(); + } + return new ReindexDataStreamStatus( + startTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress, + pending, + errors + ); + } + + public void testToXContent() throws IOException { + ReindexDataStreamStatus status = new ReindexDataStreamStatus( + 1234L, + 200, + 100, + true, + new ElasticsearchException("the whole task failed"), + 12, + 8, + List.of( + Tuple.tuple("index7", new ElasticsearchException("index7 failed")), + Tuple.tuple("index8", new ElasticsearchException("index8 " + "failed")) + ) + ); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + 
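+            // The expected "successes" value of 78 below is, assuming ReindexDataStreamStatus#toXContent derives it
+            // rather than storing it, totalIndicesToBeUpgraded - inProgress - pending - errors.size(),
+            // i.e. 100 - 12 - 8 - 2 = 78.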
+            builder.humanReadable(true);
+            status.toXContent(builder, EMPTY_PARAMS);
+            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+                Map<String, Object> parserMap = parser.map();
+                assertThat(
+                    parserMap,
+                    equalTo(
+                        Map.ofEntries(
+                            entry("start_time", 1234),
+                            entry("total_indices", 200),
+                            entry("total_indices_requiring_upgrade", 100),
+                            entry("complete", true),
+                            entry("exception", "the whole task failed"),
+                            entry("successes", 78),
+                            entry("in_progress", 12),
+                            entry("pending", 8),
+                            entry(
+                                "errors",
+                                List.of(
+                                    Map.of("index", "index7", "message", "index7 failed"),
+                                    Map.of("index", "index8", "message", "index8 failed")
+                                )
+                            )
+                        )
+                    )
+                );
+            }
+        }
+    }
+}
diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java
new file mode 100644
index 0000000000000..55098bf4a68d5
--- /dev/null
+++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.datastreams.task;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.test.AbstractXContentSerializingTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.json.JsonXContent;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
+import static org.hamcrest.Matchers.equalTo;
+
+public class ReindexDataStreamTaskParamsTests extends AbstractXContentSerializingTestCase<ReindexDataStreamTaskParams> {
+
+    @Override
+    protected Writeable.Reader<ReindexDataStreamTaskParams> instanceReader() {
+        return ReindexDataStreamTaskParams::new;
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams createTestInstance() {
+        return new ReindexDataStreamTaskParams(randomAlphaOfLength(50), randomLong(), randomNonNegativeInt(), randomNonNegativeInt());
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams mutateInstance(ReindexDataStreamTaskParams instance) {
+        String sourceDataStream = instance.sourceDataStream();
+        long startTime = instance.startTime();
+        int totalIndices = instance.totalIndices();
+        int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded();
+        switch (randomIntBetween(0, 3)) {
+            case 0 -> sourceDataStream = randomAlphaOfLength(50);
+            case 1 -> startTime = randomLong();
+            case 2 -> totalIndices = totalIndices + 1;
+            case 3 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1;
+            default -> throw new UnsupportedOperationException();
+        }
+        return new ReindexDataStreamTaskParams(sourceDataStream, startTime, totalIndices, totalIndicesToBeUpgraded);
+    }
+
+    @Override
+    protected ReindexDataStreamTaskParams doParseInstance(XContentParser parser) {
+        return ReindexDataStreamTaskParams.fromXContent(parser);
+    }
+
+    public void testToXContent() throws IOException {
+        ReindexDataStreamTaskParams params = createTestInstance();
+        try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) {
+            builder.humanReadable(true);
+            params.toXContent(builder, EMPTY_PARAMS);
+            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+                Map<String, Object> parserMap = parser.map();
+                assertThat(parserMap.get("source_data_stream"), equalTo(params.sourceDataStream()));
+                assertThat(((Number) parserMap.get("start_time")).longValue(), equalTo(params.startTime()));
+            }
+        }
+    }
+}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java
index 61ca050d91c13..2f96aa3cbc69a 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java
@@ -268,7 +268,7 @@ private static Set<String> pipelinesWithGeoIpProcessor(ClusterState clusterState
         Set<String> ids = new HashSet<>();
         // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph
         for (PipelineConfiguration configuration : configurations) {
-            List<Map<String, Object>> processors = (List<Map<String, Object>>) configuration.getConfigAsMap().get(Pipeline.PROCESSORS_KEY);
+            List<Map<String, Object>> processors = (List<Map<String, Object>>) configuration.getConfig().get(Pipeline.PROCESSORS_KEY);
             if (hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation)) {
                 ids.add(configuration.getId());
             }
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
index e76db7cfb1d26..5a1d8c002aa17 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
@@ -50,5 +50,7 @@ static_import {
     double cosineSimilarity(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity
     double dotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct
     double hamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$Hamming
+    double maxSimDotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimDotProduct
+    double maxSimInvHamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimInvHamming
 }
 
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml
new file mode 100644
index 0000000000000..caa7c59ab4c42
--- /dev/null
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml
@@ -0,0 +1,206 @@
+setup:
+  - requires:
+      capabilities:
+        - method: POST
+          path: /_search
+          capabilities: [ multi_dense_vector_script_max_sim ]
+      test_runner_features: capabilities
+      reason: "Support for multi dense vector max-sim functions capability required"
+  - skip:
+      features: headers
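+
+# Note: element_type: bit packs 8 dimensions per byte, so the dims: 40 bit_vector
+# below is supplied as 5 byte values per vector, the same shape as the 5-dim byte_vector.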
+
+  - do:
+      indices.create:
+        index: test-index
+        body:
+          settings:
+            number_of_shards: 1
+          mappings:
+            properties:
+              vector:
+                type: multi_dense_vector
+                dims: 5
+              byte_vector:
+                type: multi_dense_vector
+                dims: 5
+                element_type: byte
+              bit_vector:
+                type: multi_dense_vector
+                dims: 40
+                element_type: bit
+  - do:
+      index:
+        index: test-index
+        id: "1"
+        body:
+          vector: [[230.0, 300.33, -34.8988, 15.555, -200.0], [-0.5, 100.0, -13, 14.8, -156.0]]
+          byte_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]]
+          bit_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]]
+
+  - do:
+      index:
+        index: test-index
+        id: "3"
+        body:
+          vector: [[0.5, 111.3, -13.0, 14.8, -156.0]]
+          byte_vector: [[2, 18, -5, 0, -124]]
+          bit_vector: [[2, 18, -5, 0, -124]]
+
+  - do:
+      indices.refresh: {}
+---
+"Test max-sim dot product scoring":
+  - skip:
+      features: close_to
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimDotProduct(params.query_vector, 'vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 1]]
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 611.316, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 68.90001, error: 0.01}}
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimDotProduct(params.query_vector, 'byte_vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 0]]
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 230, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 33, error: 0.01}}
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimDotProduct(params.query_vector, 'bit_vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 0]]
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 3, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 2, error: 0.01}}
+
+# doing max-sim dot product with a vector where the stored bit vectors are used as masks
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimDotProduct(params.query_vector, 'bit_vector')"
+                params:
+                  query_vector: [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]]
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "1"}
+  - close_to: {hits.hits.0._score: {value: 190, error: 0.01}}
+
+  - match: {hits.hits.1._id: "3"}
+  - close_to: {hits.hits.1._score: {value: 125, error: 0.01}}
+---
+"Test max-sim inv hamming scoring":
+  - skip:
+      features: close_to
+
+  # inv hamming doesn't apply to float vectors
+  - do:
+      catch: bad_request
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimInvHamming(params.query_vector, 'vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 1]]
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimInvHamming(params.query_vector, 'byte_vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 1]]
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "3"}
+  - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}}
+
+  - match: {hits.hits.1._id: "1"}
+  - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}}
+
+  - do:
+      headers:
+        Content-Type: application/json
+      search:
+        rest_total_hits_as_int: true
+        body:
+          query:
+            script_score:
+              query: {match_all: {} }
+              script:
+                source: "maxSimInvHamming(params.query_vector, 'bit_vector')"
+                params:
+                  query_vector: [[1, 2, 1, 1, 1]]
+
+  - match: {hits.total: 2}
+
+  - match: {hits.hits.0._id: "3"}
+  - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}}
+
+  - match: {hits.hits.1._id: "1"}
+  - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}}
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java
index 6985f6da98cf1..12489ad37aabd 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregator.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.join.aggregations;
 
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.CardinalityUpperBound;
@@ -44,7 +45,7 @@ public ChildrenToParentAggregator(
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalParent(
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java
index 60412179807a5..1b99d2b34046c 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.BitArray;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.core.Releasable;
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.aggregations.AggregationExecutionContext;
@@ -115,7 +116,7 @@ public void postCollection() throws IOException {
     }
 
     @Override
-    protected void prepareSubAggs(long[] ordsToCollect) throws IOException {
+    protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {
         IndexReader indexReader = searcher().getIndexReader();
         for (LeafReaderContext ctx : indexReader.leaves()) {
             Scorer childDocsScorer = outFilter.scorer(ctx);
@@ -153,9 +154,10 @@ public float score() {
                      * structure that maps a primitive long to a list of primitive
                      * longs.
                     */
-                    for (long owningBucketOrd : ordsToCollect) {
-                        if (collectionStrategy.exists(owningBucketOrd, globalOrdinal)) {
-                            collectBucket(sub, docId, owningBucketOrd);
+                    for (long ord = 0; ord < ordsToCollect.size(); ord++) {
+                        long ordToCollect = ordsToCollect.get(ord);
+                        if (collectionStrategy.exists(ordToCollect, globalOrdinal)) {
+                            collectBucket(sub, docId, ordToCollect);
                         }
                     }
                 }
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
index d8a061a2de6d9..939107f87715d 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.join.aggregations;
 
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.CardinalityUpperBound;
@@ -40,7 +41,7 @@ public ParentToChildrenAggregator(
     }
 
     @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+    public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
         return buildAggregationsForSingleBucket(
             owningBucketOrds,
             (owningBucketOrd, subAggregationResults) -> new InternalChildren(
diff --git a/modules/rank-eval/build.gradle b/modules/rank-eval/build.gradle
index 511dd7be9ae68..c9016798c18b9 100644
--- a/modules/rank-eval/build.gradle
+++ b/modules/rank-eval/build.gradle
@@ -25,7 +25,3 @@ testClusters.configureEach {
   // Modules who's integration is explicitly tested in integration tests
   module ':modules:lang-mustache'
 }
-
-tasks.named("yamlRestCompatTestTransform").configure({ task ->
-  task.skipTest("rank_eval/30_failures/Response format", "warning does not exist for compatibility")
-})
diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java
index ead2cb36ad150..dcd29c6d26c6e 100644
--- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java
+++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java
@@ -51,8 +51,9 @@ protected String getTestRestCluster() {
         return cluster.getHttpAddresses();
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/116811")
     public void testReloadCredentialsFromKeystore() throws IOException {
+        assumeFalse("doesn't work in a FIPS JVM, but that's ok", inFipsJvm());
+
         // Register repository (?verify=false because we don't have access to the blob store yet)
         final var repositoryName = randomIdentifier();
         registerRepository(
diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle
index e743939cbf79e..e8e06f0a9c4c7 100644
--- a/modules/runtime-fields-common/build.gradle
+++ b/modules/runtime-fields-common/build.gradle
@@ -22,7 +22,3 @@ dependencies {
   api project(':libs:grok')
   api project(':libs:dissect')
 }
-
-tasks.named("yamlRestCompatTestTransform").configure({ task ->
-  task.skipTestsByFilePattern("**/runtime_fields/110_composite.yml", "warning does not exist for compatibility")
-})
diff --git a/muted-tests.yml b/muted-tests.yml
index 8696a1bddd3d4..f8ab532dcaa94 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -67,9 +67,6 @@ tests:
 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
   method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item}
   issue: https://github.com/elastic/elasticsearch/issues/113325
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated}
-  issue: https://github.com/elastic/elasticsearch/pull/113286
 - class: org.elasticsearch.integration.KibanaUserRoleIntegTests
   method: testFieldMappings
   issue: https://github.com/elastic/elasticsearch/issues/113592
@@ -115,9 +112,6 @@ tests:
 - class: org.elasticsearch.xpack.shutdown.NodeShutdownIT
   method: testStalledShardMigrationProperlyDetected
   issue: https://github.com/elastic/elasticsearch/issues/115697
-- class: org.elasticsearch.xpack.inference.InferenceCrudIT
-  method: testSupportedStream
-  issue: https://github.com/elastic/elasticsearch/issues/113430
 - class: org.elasticsearch.xpack.test.rest.XPackRestIT
   method: test {p0=transform/transforms_start_stop/Verify start transform reuses destination index}
   issue: https://github.com/elastic/elasticsearch/issues/115808
@@ -186,8 +180,6 @@ tests:
 - class: org.elasticsearch.xpack.downsample.ILMDownsampleDisruptionIT
   method: testILMDownsampleRollingRestart
   issue: https://github.com/elastic/elasticsearch/issues/114233
-- class: org.elasticsearch.xpack.kql.query.KqlQueryBuilderTests
-  issue: https://github.com/elastic/elasticsearch/issues/116487
 - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
   method: testInvalidJSON
   issue: https://github.com/elastic/elasticsearch/issues/116521
@@ -217,9 +209,6 @@ tests:
 - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
   method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
   issue: https://github.com/elastic/elasticsearch/issues/116777
-- class: org.elasticsearch.repositories.s3.RepositoryS3RestIT
-  method: testReloadCredentialsFromKeystore
-  issue: https://github.com/elastic/elasticsearch/issues/116811
 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT
   issue: https://github.com/elastic/elasticsearch/issues/116851
 - class: org.elasticsearch.xpack.esql.analysis.VerifierTests
@@ -234,14 +223,36 @@ tests:
 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT
   method: testRandomDirectoryIOExceptions
   issue: https://github.com/elastic/elasticsearch/issues/114824
-- class: org.elasticsearch.xpack.inference.InferenceRestIT
-  method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint}
-  issue: https://github.com/elastic/elasticsearch/issues/116542
 - class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests
   method: testTermQuery
   issue: https://github.com/elastic/elasticsearch/issues/116879
-- class: org.elasticsearch.xpack.inference.InferenceRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/116899
+- class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT
+  method: testQueryBuilderBWC {p0=UPGRADED}
+  issue: https://github.com/elastic/elasticsearch/issues/116989
+- class: org.elasticsearch.upgrades.QueryBuilderBWCIT
+  method: testQueryBuilderBWC {cluster=UPGRADED}
+  issue: https://github.com/elastic/elasticsearch/issues/116990
+- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
+  method: test {yaml=reference/esql/esql-across-clusters/line_197}
+  issue: https://github.com/elastic/elasticsearch/issues/117099
+- class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT
+  method: test {yaml=/10_apm/Test template reinstallation}
+  issue: https://github.com/elastic/elasticsearch/issues/116445
+- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
+  method: testMultipleInferencesTriggeringDownloadAndDeploy
+  issue: https://github.com/elastic/elasticsearch/issues/117208
+- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT
+  method: testEsqlSource
+  issue: https://github.com/elastic/elasticsearch/issues/117212
+- class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT
+  method: testEnterpriseDownloaderTask
+  issue: https://github.com/elastic/elasticsearch/issues/115163
+- class: org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT
+  method: testSeqNoCASLinearizability
+  issue: https://github.com/elastic/elasticsearch/issues/117249
+- class: org.elasticsearch.discovery.ClusterDisruptionIT
+  method: testAckedIndexing
+  issue: https://github.com/elastic/elasticsearch/issues/117024
 
 # Examples:
 #
diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle
index e5efb883a789c..5f0fee6636256 100644
--- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle
+++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle
@@ -54,8 +54,9 @@ tasks.named("yamlRestTest").configure { enabled = false }
 ['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action ->
   TaskProvider<AntFixture> fixture = tasks.register("ec2Fixture${action}", AntFixture) {
     dependsOn project.sourceSets.yamlRestTest.runtimeClasspath
-    env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}"
-    executable = "${buildParams.runtimeJavaHome.get()}/bin/java"
+    FileCollection cp = project.sourceSets.yamlRestTest.runtimeClasspath
+    env 'CLASSPATH', "${-> cp.asPath}"
+    executable = "${buildParams.runtimeJavaHome.get() }/bin/java"
     args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt"
   }
 
@@ -67,9 +68,18 @@ tasks.named("yamlRestTest").configure { enabled = false }
     classpath = yamlRestTestSourceSet.getRuntimeClasspath()
   }
 
+  if(action == 'ContainerCredentials') {
+    def addressAndPortSource = fixture.get().addressAndPortSource
+    testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach {
+      environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
+        () -> addressAndPortSource.map{ addr -> "http://${addr}/ecs_credentials_endpoint" }.get(), IGNORE_VALUE
+    }
+  }
+
   tasks.named("check").configure { dependsOn(yamlRestTestTask) }
 
+  def addressAndPortSource = fixture.get().addressAndPortSource
   testClusters.matching { it.name == yamlRestTestTask.name}.configureEach {
     numberOfNodes = ec2NumberOfNodes
 
@@ -77,9 +87,9 @@ tasks.named("yamlRestTest").configure { enabled = false }
     setting 'discovery.seed_providers', 'ec2'
     setting 'network.host', '_ec2_'
-    setting 'discovery.ec2.endpoint', { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE
+    setting 'discovery.ec2.endpoint', { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE
 
-    systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE
+    systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE
   }
 }
 
@@ -106,11 +116,6 @@ tasks.named("ec2FixtureContainerCredentials").configure {
   env 'ACTIVATE_CONTAINER_CREDENTIALS', true
 }
 
-testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach {
-  environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI',
-    { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE
-}
-
 // Extra config for InstanceProfile
 tasks.named("ec2FixtureInstanceProfile").configure {
   env 'ACTIVATE_INSTANCE_PROFILE', true
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
index 3a983dbd058df..d98d53baf9015 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartDownsampleIT.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.test.cluster.FeatureFlag;
 import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.rules.RuleChain;
@@ -269,10 +268,7 @@ private String getRollupIndexName() throws IOException {
     }
 
     public void testRollupIndex() throws Exception {
-        assumeTrue(
-            "Downsample got many stability improvements in 8.10.0",
-            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE)
-        );
+        assumeTrue("Downsample got many stability improvements in 8.10.0", oldClusterHasFeature("gte_v8.10.0"));
         if (isRunningAgainstOldCluster()) {
             createIlmPolicy();
             createIndex();
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index 26e4f3146da2f..0f41712abe927 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -41,7 +41,6 @@
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.ObjectPath;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;
@@ -262,7 +261,7 @@ public void testNewReplicas() throws Exception {
     }
 
     public void testSearchTimeSeriesMode() throws Exception {
-        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT));
+        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0"));
         int numDocs;
         if (isRunningAgainstOldCluster()) {
             numDocs = createTimeSeriesModeIndex(1);
@@ -300,7 +299,7 @@ public void testSearchTimeSeriesMode() throws Exception {
     }
 
     public void testNewReplicasTimeSeriesMode() throws Exception {
-        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT));
assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature("gte_v8.2.0")); if (isRunningAgainstOldCluster()) { createTimeSeriesModeIndex(0); } else { diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java index f1f4fcf091e8f..9866d94dccc3c 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeFullClusterRestartIT.java @@ -17,10 +17,8 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.ClassRule; @@ -31,9 +29,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; - public class LogsIndexModeFullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { @ClassRule @@ -125,8 +120,6 @@ protected ElasticsearchCluster getUpgradeCluster() { }"""; public void testLogsIndexing() throws IOException { - assumeTrue("Test uses data streams", oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED)); - if (isRunningAgainstOldCluster()) { assertOK(client().performRequest(putTemplate(client(), "logs-template", STANDARD_TEMPLATE))); assertOK(client().performRequest(createDataStream("logs-apache-production"))); @@ -172,22 +165,16 @@ public void testLogsIndexing() throws IOException { assertOK(bulkIndexResponse); assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false)); - assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk()); - assertIndexMappingsAndSettings( - 1, - Matchers.equalTo("logsdb"), - matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic")) - ); + assertIndexSettings(0, Matchers.nullValue()); + assertIndexSettings(1, Matchers.equalTo("logsdb")); } } - private void assertIndexMappingsAndSettings(int backingIndex, final Matcher indexModeMatcher, final MapMatcher mappingsMatcher) - throws IOException { + private void assertIndexSettings(int backingIndex, final Matcher indexModeMatcher) throws IOException { assertThat( getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"), indexModeMatcher ); - assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher); } private static Request createDataStream(final String dataStreamName) { diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f6549a2d83fe6..d8f906b23d523 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -11,6 +11,10 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import 
org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.testclusters.TestClustersPlugin apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -63,6 +67,8 @@ excludeList.add('indices.resolve_index/20_resolve_system_index/*') // Excluded because the error has changed excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') +def clusterPath = getPath() + buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { /* This project runs the core REST tests against a 4 node cluster where two of @@ -84,18 +90,42 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { useCluster baseCluster mustRunAfter("precommit") + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def baseInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterOneNodeUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterTwoNodesUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + def nonInputProps = nonInputProperties + def sharedRepoFolder = new File(buildDir, "cluster/shared/repo/${baseName}") doFirst { - delete("${buildDir}/cluster/shared/repo/${baseName}") + delete(sharedRepoFolder) // Getting the endpoints causes a wait for the cluster - println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Test cluster endpoints are: ${-> baseInfo.get().join(",")}" println "Upgrading one node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster - println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Upgrade complete, endpoints are: ${-> baseInfoAfterOneNodeUpdate.get().join(",")}" println "Upgrading another node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() - nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) - nonInputProperties.systemProperty('tests.clustername', baseName) + nonInputProps.systemProperty('tests.rest.cluster', baseInfoAfterTwoNodesUpdate.map(c -> c.join(","))) + nonInputProps.systemProperty('tests.clustername', baseName) if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,7 +133,7 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') - onlyIf("BWC tests disabled") { project.bwc_tests_enabled } 
+//      onlyIf("BWC tests disabled") { project.bwc_tests_enabled }
     }
 
     tasks.register(bwcTaskName(bwcVersion)) {
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java
index e0d1e7aafa637..d9adec47ff483 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java
@@ -14,14 +14,12 @@
 import org.elasticsearch.Build;
 import org.elasticsearch.action.admin.cluster.desirednodes.UpdateDesiredNodesRequest;
 import org.elasticsearch.client.Request;
-import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.cluster.metadata.DesiredNode;
 import org.elasticsearch.cluster.metadata.DesiredNodeWithStatus;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.json.JsonXContent;
 
 import java.io.IOException;
@@ -43,24 +41,7 @@ public DesiredNodesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) {
         desiredNodesVersion = upgradedNodes + 1;
     }
 
-    private enum ProcessorsPrecision {
-        DOUBLE,
-        FLOAT
-    }
-
     public void testUpgradeDesiredNodes() throws Exception {
-        assumeTrue("Desired nodes was introduced in 8.1", oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_NODE_API_SUPPORTED));
-
-        if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) {
-            assertUpgradedNodesCanReadDesiredNodes();
-        } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) {
-            assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent();
-        } else {
-            assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions();
-        }
-    }
-
-    private void assertUpgradedNodesCanReadDesiredNodes() throws Exception {
         if (isMixedCluster() || isUpgradedCluster()) {
             final Map<String, Object> desiredNodes = getLatestDesiredNodes();
             final String historyId = extractValue(desiredNodes, "history_id");
@@ -69,60 +50,10 @@ private void assertUpgradedNodesCanReadDesiredNodes() throws Exception {
             assertThat(version, is(equalTo(desiredNodesVersion - 1)));
         }
 
-        addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.DOUBLE);
+        addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion);
         assertAllDesiredNodesAreActualized();
     }
 
-    private void assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent() throws Exception {
-        // We define the same set of desired nodes to ensure that they are equal across all
-        // the test runs, otherwise we cannot guarantee an idempotent update in this test
-        final var desiredNodes = getNodeNames().stream()
-            .map(
-                nodeName -> new DesiredNode(
-                    Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(),
-                    1238.49922909,
-                    ByteSizeValue.ofGb(32),
-                    ByteSizeValue.ofGb(128),
-                    clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version()
-                )
-            )
-            .toList();
-
-        if (isMixedCluster()) {
-            updateDesiredNodes(desiredNodes, desiredNodesVersion - 1);
-        }
-        for (int i = 0; i < 2; i++) {
-            updateDesiredNodes(desiredNodes, desiredNodesVersion);
-        }
-
-        final Map<String, Object> latestDesiredNodes = getLatestDesiredNodes();
-        final int latestDesiredNodesVersion = extractValue(latestDesiredNodes, "version");
-        assertThat(latestDesiredNodesVersion, is(equalTo(desiredNodesVersion)));
-
-        if (isUpgradedCluster()) {
-            assertAllDesiredNodesAreActualized();
-        }
-    }
-
-    private void assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions() throws Exception {
-        if (isOldCluster()) {
-            addClusterNodesToDesiredNodesWithIntegerProcessors(1);
-        } else if (isMixedCluster()) {
-            // Processor ranges or float processors are forbidden during upgrades: 8.2 -> 8.3 clusters
-            final var responseException = expectThrows(
-                ResponseException.class,
-                () -> addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(desiredNodesVersion, ProcessorsPrecision.FLOAT)
-            );
-            final var statusCode = responseException.getResponse().getStatusLine().getStatusCode();
-            assertThat(statusCode, is(equalTo(400)));
-        } else {
-            assertAllDesiredNodesAreActualized();
-            addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(4, ProcessorsPrecision.FLOAT);
-        }
-
-        getLatestDesiredNodes();
-    }
-
     private Map<String, Object> getLatestDesiredNodes() throws IOException {
         final var getDesiredNodesRequest = new Request("GET", "/_internal/desired_nodes/_latest");
         final var response = client().performRequest(getDesiredNodesRequest);
@@ -143,15 +74,14 @@ private void assertAllDesiredNodesAreActualized() throws Exception {
         }
     }
 
-    private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version, ProcessorsPrecision processorsPrecision)
-        throws Exception {
+    private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int version) throws Exception {
         final List<DesiredNode> nodes;
         if (randomBoolean()) {
             nodes = getNodeNames().stream()
                 .map(
                     nodeName -> new DesiredNode(
                         Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(),
-                        processorsPrecision == ProcessorsPrecision.DOUBLE ? randomDoubleProcessorCount() : 0.5f,
+                        randomDoubleProcessorCount(),
                         ByteSizeValue.ofGb(randomIntBetween(10, 24)),
                         ByteSizeValue.ofGb(randomIntBetween(128, 256)),
                         clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version()
@@ -160,9 +90,7 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve
                 .toList();
         } else {
            nodes = getNodeNames().stream().map(nodeName -> {
-                double minProcessors = processorsPrecision == ProcessorsPrecision.DOUBLE
-                    ? randomDoubleProcessorCount()
-                    : randomFloatProcessorCount();
+                double minProcessors = randomDoubleProcessorCount();
                 return new DesiredNode(
                     Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(),
                     new DesiredNode.ProcessorsRange(minProcessors, minProcessors + randomIntBetween(10, 20)),
@@ -175,21 +103,6 @@ private void addClusterNodesToDesiredNodesWithProcessorsOrProcessorRanges(int ve
         updateDesiredNodes(nodes, version);
     }
 
-    private void addClusterNodesToDesiredNodesWithIntegerProcessors(int version) throws Exception {
-        final var nodes = getNodeNames().stream()
-            .map(
-                nodeName -> new DesiredNode(
-                    Settings.builder().put(NODE_NAME_SETTING.getKey(), nodeName).build(),
-                    randomIntBetween(1, 24),
-                    ByteSizeValue.ofGb(randomIntBetween(10, 24)),
-                    ByteSizeValue.ofGb(randomIntBetween(128, 256)),
-                    clusterHasFeature(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED) ? null : Build.current().version()
-                )
-            )
-            .toList();
-        updateDesiredNodes(nodes, version);
-    }
-
     private void updateDesiredNodes(List<DesiredNode> nodes, int version) throws IOException {
         final var request = new Request("PUT", "/_internal/desired_nodes/upgrade_test/" + version);
         try (var builder = JsonXContent.contentBuilder()) {
@@ -226,10 +139,6 @@ private double randomDoubleProcessorCount() {
         return randomDoubleBetween(0.5, 512.1234, true);
     }
 
-    private float randomFloatProcessorCount() {
-        return randomIntBetween(1, 512) + randomFloat();
-    }
-
     @SuppressWarnings("unchecked")
     private static <T> T extractValue(Map<String, Object> map, String path) {
         return (T) XContentMapValues.extractValue(path, map);
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java
index 70658da70eb80..bca0c26ad2c32 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.client.Response;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.junit.Before;
 
 import java.io.IOException;
@@ -244,10 +243,6 @@ private String getRollupIndexName() throws IOException {
     }
 
     public void testRollupIndex() throws Exception {
-        assumeTrue(
-            "Downsample got many stability improvements in 8.10.0",
-            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_DOWNSAMPLING_STABLE)
-        );
         if (isOldCluster()) {
             createIlmPolicy();
             createIndex();
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java
index 65bf62783fd69..090f409fd46d0 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java
@@ -18,10 +18,10 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.time.DateUtils;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
 import org.elasticsearch.test.ListMatcher;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xcontent.json.JsonXContent;
@@ -258,7 +258,6 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio
 
     public void testTsdb() throws IOException {
         final Version oldClusterVersion = Version.fromString(getOldClusterVersion());
-        assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT));
 
         StringBuilder bulk = new StringBuilder();
         if (isOldCluster()) {
@@ -385,6 +384,7 @@ private void tsdbBulk(StringBuilder bulk, String dim, long timeStart, long timeE
 
     private void assertTsdbAgg(final Version oldClusterVersion, final List<String> expectedTsids, final Matcher<?>... expected)
         throws IOException {
+        @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_ANALYTICS)
         boolean onOrAfterTsidHashingVersion = oldClusterVersion.onOrAfter(Version.V_8_13_0);
         Request request = new Request("POST", "/tsdb/_search");
         request.addParameter("size", "0");
@@ -414,8 +414,6 @@ private void assertTsdbAgg(final Version oldClusterVersion, final List<String> e
     }
 
     public void testSyntheticSource() throws IOException {
-        assumeTrue("added in 8.4.0", oldClusterHasFeature(RestTestLegacyFeatures.SYNTHETIC_SOURCE_SUPPORTED));
-
         if (isOldCluster()) {
             Request createIndex = new Request("PUT", "/synthetic");
             XContentBuilder indexSpec = XContentBuilder.builder(XContentType.JSON.xContent()).startObject();
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
index 8c369ebc9950d..1eb7cbd3f70c2 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/LogsIndexModeRollingUpgradeIT.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.FormatNames;
-import org.elasticsearch.test.MapMatcher;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
 import org.elasticsearch.test.cluster.local.distribution.DistributionType;
 import org.hamcrest.Matcher;
@@ -30,9 +29,6 @@
 import java.util.Map;
 import java.util.function.Supplier;
 
-import static org.elasticsearch.test.MapMatcher.assertMap;
-import static org.elasticsearch.test.MapMatcher.matchesMap;
-
 public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCase {
 
     @ClassRule()
@@ -160,14 +156,10 @@ public void testLogsIndexing() throws IOException {
             assertOK(bulkIndexResponse);
             assertThat(entityAsMap(bulkIndexResponse).get("errors"), Matchers.is(false));
 
-            assertIndexMappingsAndSettings(0, Matchers.nullValue(), matchesMap().extraOk());
-            assertIndexMappingsAndSettings(1, Matchers.nullValue(), matchesMap().extraOk());
-            assertIndexMappingsAndSettings(2, Matchers.nullValue(), matchesMap().extraOk());
-            assertIndexMappingsAndSettings(
-                3,
-                Matchers.equalTo("logsdb"),
-                matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))
-            );
+            assertIndexSettings(0, Matchers.nullValue());
+            assertIndexSettings(1, Matchers.nullValue());
+            assertIndexSettings(2, Matchers.nullValue());
+            assertIndexSettings(3, Matchers.equalTo("logsdb"));
         }
     }
 
@@ -183,13 +175,11 @@ static void enableLogsdbByDefault() throws IOException {
         assertOK(client().performRequest(request));
     }
-    private void assertIndexMappingsAndSettings(int backingIndex, final Matcher<Object> indexModeMatcher, final MapMatcher mappingsMatcher)
-        throws IOException {
+    private void assertIndexSettings(int backingIndex, final Matcher<Object> indexModeMatcher) throws IOException {
         assertThat(
             getSettings(client(), getWriteBackingIndex(client(), "logs-apache-production", backingIndex)).get("index.mode"),
             indexModeMatcher
         );
-        assertMap(getIndexMappingAsMap(getWriteBackingIndex(client(), "logs-apache-production", backingIndex)), mappingsMatcher);
     }
 
     private static Request createDataStream(final String dataStreamName) {
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
index 3343a683bbd11..9217852f1867c 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
@@ -50,12 +49,6 @@ public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) {
     }
 
     public void testSnapshotBasedRecovery() throws Exception {
-        assumeTrue(
-            "Cancel shard allocation command is broken for initial versions of the desired_balance allocator",
-            oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_SUPPORTED) == false
-                || oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_FIXED)
-        );
-
         final String indexName = "snapshot_based_recovery";
         final String repositoryName = "snapshot_based_recovery_repo";
         final int numDocs = 200;
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
index 6744c84f29d0f..46b39128c3a31 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.time.FormatNames;
 import org.elasticsearch.test.rest.ObjectPath;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 
 import java.io.IOException;
 import java.time.Instant;
@@ -24,8 +23,6 @@
 import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.hamcrest.Matchers.nullValue;
 
 public class TsdbIT extends AbstractRollingUpgradeTestCase {
 
@@ -131,7 +128,6 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) {
         """;
 
     public void testTsdbDataStream() throws Exception {
-        assumeTrue("TSDB was GA-ed in 8.7.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE));
         String dataStreamName = "k8s";
         if (isOldCluster()) {
             final String INDEX_TEMPLATE = """
@@ -155,70 +151,6 @@ public void testTsdbDataStream() throws Exception {
         }
     }
 
-    public void testTsdbDataStreamWithComponentTemplate() throws Exception {
-        assumeTrue(
-            "TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0",
-            oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE)
-                && (oldClusterHasFeature(RestTestLegacyFeatures.TSDB_EMPTY_TEMPLATE_FIXED) == false)
-        );
-        String dataStreamName = "template-with-component-template";
-        if (isOldCluster()) {
-            final String COMPONENT_TEMPLATE = """
-                {
-                    "template": $TEMPLATE
-                }
-                """;
-            var putComponentTemplate = new Request("POST", "/_component_template/1");
-            String template = TEMPLATE.replace("\"time_series\"", "\"time_series\", \"routing_path\": [\"k8s.pod.uid\"]");
-            putComponentTemplate.setJsonEntity(COMPONENT_TEMPLATE.replace("$TEMPLATE", template));
-            assertOK(client().performRequest(putComponentTemplate));
-            final String INDEX_TEMPLATE = """
-                {
-                    "index_patterns": ["$PATTERN"],
-                    "composed_of": ["1"],
-                    "data_stream": {
-                    }
-                }""";
-            // Add composable index template
-            String templateName = "2";
-            var putIndexTemplateRequest = new Request("POST", "/_index_template/" + templateName);
-            putIndexTemplateRequest.setJsonEntity(INDEX_TEMPLATE.replace("$PATTERN", dataStreamName));
-            assertOK(client().performRequest(putIndexTemplateRequest));
-
-            performOldClustertOperations(templateName, dataStreamName);
-        } else if (isMixedCluster()) {
-            performMixedClusterOperations(dataStreamName);
-        } else if (isUpgradedCluster()) {
-            performUpgradedClusterOperations(dataStreamName);
-
-            var dataStreams = getDataStream(dataStreamName);
-            assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo(dataStreamName));
-            assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(2));
-            String firstBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.0.index_name");
-            {
-                var indices = getIndex(firstBackingIndex);
-                var escapedBackingIndex = firstBackingIndex.replace(".", "\\.");
-                assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName));
-                assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), nullValue());
-                String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time");
-                assertThat(startTime, nullValue());
-                String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time");
-                assertThat(endTime, nullValue());
-            }
-            String secondBackingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices.1.index_name");
-            {
-                var indices = getIndex(secondBackingIndex);
-                var escapedBackingIndex = secondBackingIndex.replace(".", "\\.");
-                assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo(dataStreamName));
-                assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), equalTo("time_series"));
-                String startTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time");
-                assertThat(startTime, notNullValue());
-                String endTime = ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time");
-                assertThat(endTime, notNullValue());
-            }
-        }
-    }
-
     private void performUpgradedClusterOperations(String dataStreamName) throws Exception {
         ensureGreen(dataStreamName);
         var rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover");
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
index f3a322b54039a..b2298c12b7b98 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
@@ -17,13 +17,11 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.core.Strings;
-import org.elasticsearch.test.rest.RestTestLegacyFeatures;
 
 import java.io.IOException;
 import java.util.Map;
 
 import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM;
-import static org.hamcrest.Matchers.is;
 
 public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase {
 
@@ -35,33 +33,22 @@ public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) {
     private static final String EXPECTED_WARNING = "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will "
         + "be removed in a future release! See the breaking changes documentation for the next major version.";
 
-    private static final String EXPECTED_V8_WARNING = "[index.indexing.slowlog.level] setting was deprecated in the previous Elasticsearch"
-        + " release and is removed in this release.";
-
     public void testOldIndexSettings() throws Exception {
         if (isOldCluster()) {
             Request createTestIndex = new Request("PUT", "/" + INDEX_NAME);
             createTestIndex.setJsonEntity("{\"settings\": {\"index.indexing.slowlog.level\": \"WARN\"}}");
             createTestIndex.setOptions(expectWarnings(EXPECTED_WARNING));
-            if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED)) {
-                assertTrue(
-                    expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage()
-                        .contains("unknown setting [index.indexing.slowlog.level]")
-                );
+            assertTrue(
+                expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage()
+                    .contains("unknown setting [index.indexing.slowlog.level]")
+            );
 
-                Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME);
-                client().performRequest(createTestIndex1);
-            } else {
-                // create index with settings no longer valid in 8.0
-                client().performRequest(createTestIndex);
-            }
+            Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME);
+            client().performRequest(createTestIndex1);
 
             // add some data
             Request bulk = new Request("POST", "/_bulk");
             bulk.addParameter("refresh", "true");
-            if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) {
-                bulk.setOptions(expectWarnings(EXPECTED_WARNING));
-            }
             bulk.setJsonEntity(Strings.format("""
                 {"index": {"_index": "%s"}}
                 {"f1": "v1", "f2": "v2"}
@@ -71,34 +58,12 @@ public void testOldIndexSettings() throws Exception {
             // add some more data
             Request bulk = new Request("POST", "/_bulk");
             bulk.addParameter("refresh", "true");
-            if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) {
-                bulk.setOptions(expectWarnings(EXPECTED_WARNING));
-            }
             bulk.setJsonEntity(Strings.format("""
                 {"index": {"_index": "%s"}}
                 {"f1": "v3", "f2": "v4"}
                 """, INDEX_NAME));
             client().performRequest(bulk);
         } else {
-            if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) {
-                Request createTestIndex = new Request("PUT", "/" + INDEX_NAME + "/_settings");
-                // update index settings should work
-                createTestIndex.setJsonEntity("{\"index.indexing.slowlog.level\": \"INFO\"}");
-                createTestIndex.setOptions(expectWarnings(EXPECTED_V8_WARNING));
-
client().performRequest(createTestIndex); - - // ensure we were able to change the setting, despite it having no effect - Request indexSettingsRequest = new Request("GET", "/" + INDEX_NAME + "/_settings"); - Map<String, Object> response = entityAsMap(client().performRequest(indexSettingsRequest)); - - var slowLogLevel = (String) (XContentMapValues.extractValue( - INDEX_NAME + ".settings.index.indexing.slowlog.level", - response - )); - - // check that we can read our old index settings - assertThat(slowLogLevel, is("INFO")); - } assertCount(INDEX_NAME, 2); } } @@ -118,16 +83,6 @@ private void assertCount(String index, int countAtLeast) throws IOException { public static void updateIndexSettingsPermittingSlowlogDeprecationWarning(String index, Settings.Builder settings) throws IOException { Request request = new Request("PUT", "/" + index + "/_settings"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(settings.build())); - if (oldClusterHasFeature(RestTestLegacyFeatures.DEPRECATION_WARNINGS_LEAK_FIXED) == false) { - // There is a bug (fixed in 7.17.9 and 8.7.0 where deprecation warnings could leak into ClusterApplierService#applyChanges) - // Below warnings are set (and leaking) from an index in this test case - request.setOptions(expectVersionSpecificWarnings(v -> { - v.compatible( - "[index.indexing.slowlog.level] setting was deprecated in Elasticsearch and will be removed in a future release! " - + "See the breaking changes documentation for the next major version." - ); - })); - } client().performRequest(request); } } diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 8e1df37804708..650d17e41de7f 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -59,4 +59,11 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task -> task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") task.skipTest("cat.aliases/10_basic/Deprecated local parameter", "CAT APIs not covered by compatibility policy") task.skipTest("cat.shards/10_basic/Help", "sync_id is removed in 9.0") + task.skipTest("search/500_date_range/from, to, include_lower, include_upper deprecated", "deprecated parameters are removed in 9.0") + task.skipTest("tsdb/20_mapping/stored source is supported", "no longer serialize source_mode") + task.skipTest("tsdb/20_mapping/Synthetic source", "no longer serialize source_mode") + task.skipTest("logsdb/10_settings/create logs index", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/stored _source mode is supported", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/include/exclude is supported with stored _source", "no longer serialize source_mode") + task.skipTest("logsdb/20_source_mapping/synthetic _source is default", "no longer serialize source_mode") }) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json index bce8dfd794dca..6f3d09c15c081 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json @@ -55,6 +55,10 @@ "type": "string", "description": "Specific the time to live for the point in time", "required": true + }, + "allow_partial_search_results": { + "type": "boolean", + "description": "Specify whether to tolerate shards missing 
when creating the point-in-time, or otherwise throw an exception. (default: false)" } }, "body":{ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml index d0f89b1b8b6cb..463df7d2ab1bb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/10_settings.yml @@ -76,11 +76,6 @@ create logs index: - is_true: test - match: { test.settings.index.mode: "logsdb" } - - do: - indices.get_mapping: - index: test - - match: { test.mappings._source.mode: synthetic } - --- using default timestamp field mapping: - requires: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml index 27146557bb1be..06a007b8aaca5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/logsdb/20_source_mapping.yml @@ -13,10 +13,10 @@ synthetic _source is default: index: mode: logsdb - do: - indices.get: + indices.get_settings: index: test-default-source - - - match: { test-default-source.mappings._source.mode: "synthetic" } + - match: { test-default-source.settings.index.mode: logsdb } + - match: { test-default-source.settings.index.mapping.source.mode: null } --- stored _source mode is supported: @@ -28,11 +28,12 @@ stored _source mode is supported: index: mode: logsdb mapping.source.mode: stored + - do: - indices.get: + indices.get_settings: index: test-stored-source - - - match: { test-stored-source.mappings._source.mode: "stored" } + - match: { test-stored-source.settings.index.mode: logsdb } + - match: { test-stored-source.settings.index.mapping.source.mode: stored } --- disabled _source is not supported: @@ -110,7 +111,6 @@ include/exclude is supported with stored _source: indices.get: index: test-includes - - match: { test-includes.mappings._source.mode: "stored" } - match: { test-includes.mappings._source.includes: ["a"] } - do: @@ -129,5 +129,4 @@ include/exclude is supported with stored _source: indices.get: index: test-excludes - - match: { test-excludes.mappings._source.mode: "stored" } - match: { test-excludes.mappings._source.excludes: ["b"] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml index e9bfffb8da604..76057b5a364fb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/500_date_range.yml @@ -123,29 +123,3 @@ setup: - match: { hits.total: 1 } - length: { hits.hits: 1 } - match: { hits.hits.0._id: "4" } - - ---- -"from, to, include_lower, include_upper deprecated": - - requires: - cluster_features: "gte_v8.16.0" - reason: 'from, to, include_lower, include_upper parameters are deprecated since 8.16.0' - test_runner_features: warnings - - - do: - warnings: - - "Deprecated field [from] used, this field is unused and will be removed entirely" - - "Deprecated field [to] used, this field is unused and will be removed entirely" - - "Deprecated field [include_lower] used, this field is unused and will be removed entirely" - - "Deprecated field [include_upper] used, this field 
is unused and will be removed entirely" - search: - index: dates - body: - sort: field - query: - range: - date: - from: 1000 - to: 2023 - include_lower: false - include_upper: false diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 4d8f03a6e5e18..9fe3f5e0b7272 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -450,11 +450,6 @@ nested fields: type: long time_series_metric: gauge - - do: - indices.get_mapping: {} - - - match: {tsdb-synthetic.mappings._source.mode: synthetic} - --- stored source is supported: - requires: @@ -486,12 +481,6 @@ stored source is supported: type: keyword time_series_dimension: true - - do: - indices.get: - index: tsdb_index - - - match: { tsdb_index.mappings._source.mode: "stored" } - --- disabled source is not supported: - requires: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java deleted file mode 100644 index 8b5e014b519c8..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/ListenerActionIT.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.action; - -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.Requests; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; - -public class ListenerActionIT extends ESIntegTestCase { - public void testThreadedListeners() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - final AtomicReference<Throwable> failure = new AtomicReference<>(); - final AtomicReference<String> threadName = new AtomicReference<>(); - Client client = client(); - - IndexRequest request = new IndexRequest("test").id("1"); - if (randomBoolean()) { - // set the source, without it, we will have a verification failure - request.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); - } - - client.index(request, new ActionListener<DocWriteResponse>() { - @Override - public void onResponse(DocWriteResponse indexResponse) { - threadName.set(Thread.currentThread().getName()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - threadName.set(Thread.currentThread().getName()); - failure.set(e); - latch.countDown(); - } - }); - - latch.await(); - - assertFalse(threadName.get().contains("listener")); - } -} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index 4977d87d5a348..deae022795ad2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -65,7 +65,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .put(super.nodeSettings(nodeOrdinal, otherSettings)) .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") - .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") .build(); } @@ -161,6 +161,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { IndexRequest indexRequest = indexRequest(index); long total = indexRequest.ramBytesUsed(); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); while (total < 2048) { refCounted.incRef(); handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); @@ -175,6 +177,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); handler.lastItems(List.of(indexRequest), refCounted::decRef, future); @@ -192,6 +196,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { IncrementalBulkService incrementalBulkService = 
internalCluster().getInstance(IncrementalBulkService.class, nodeName); IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); AtomicBoolean nextPage = new AtomicBoolean(false); @@ -217,6 +223,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); assertTrue(nextPage.get()); nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); ArrayList<DocWriteRequest<?>> requestsThrottle = new ArrayList<>(); // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled @@ -235,6 +243,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { // Wait until we are ready for the next page assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); for (IncrementalBulkService.Handler h : handlers) { refCounted.incRef(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index a1395f81eb091..67576059de1e0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -669,7 +670,7 @@ public Aggregator subAggregator(String aggregatorName) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[] { buildEmptyAggregation() }; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 9364e7437141e..e4d44212f2854 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -9,35 +9,48 @@ package org.elasticsearch.monitor.metrics; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.IncrementalBulkService; import org.elasticsearch.action.delete.DeleteRequest; import 
org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static org.elasticsearch.index.IndexingPressure.MAX_COORDINATING_BYTES; import static org.elasticsearch.index.IndexingPressure.MAX_PRIMARY_BYTES; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { @@ -453,6 +466,211 @@ public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exce } } + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkLowWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + IndexRequest indexRequest = indexRequest(index); + long total = indexRequest.ramBytesUsed(); + while (total < 2048) { + refCounted.incRef(); + handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + indexRequest = 
indexRequest(index); + total += indexRequest.ramBytesUsed(); + } + + assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), greaterThan(0L)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + refCounted.incRef(); + handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); + handler.lastItems(List.of(indexRequest), refCounted::decRef, future); + + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + assertFalse(refCounted.hasReferences()); + } + + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkHighWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + ArrayList<IncrementalBulkService.Handler> handlers = new ArrayList<>(); + for (int i = 0; i < 4; ++i) { + ArrayList<DocWriteRequest<?>> requests = new ArrayList<>(); + add512BRequests(requests, index); + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + handlers.add(handler); + refCounted.incRef(); + handler.addItems(requests, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + } + + // Test that a request smaller than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is not throttled + ArrayList<DocWriteRequest<?>> requestsNoThrottle 
= new ArrayList<>(); + add512BRequests(requestsNoThrottle, index); + IncrementalBulkService.Handler handlerNoThrottle = incrementalBulkService.newBulkRequest(); + handlers.add(handlerNoThrottle); + refCounted.incRef(); + handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + ArrayList<DocWriteRequest<?>> requestsThrottle = new ArrayList<>(); + // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled + add512BRequests(requestsThrottle, index); + add512BRequests(requestsThrottle, index); + + CountDownLatch finishLatch = new CountDownLatch(1); + blockWritePool(threadPool, finishLatch); + IncrementalBulkService.Handler handlerThrottled = incrementalBulkService.newBulkRequest(); + refCounted.incRef(); + handlerThrottled.addItems(requestsThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertFalse(nextPage.get()); + finishLatch.countDown(); + + handlers.add(handlerThrottled); + + // Wait until we are ready for the next page + assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + + for (IncrementalBulkService.Handler h : handlers) { + refCounted.incRef(); + PlainActionFuture<BulkResponse> future = new PlainActionFuture<>(); + h.lastItems(List.of(indexRequest(index)), refCounted::decRef, future); + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + } + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + refCounted.decRef(); + assertFalse(refCounted.hasReferences()); + testTelemetryPlugin.collect(); + } + private static Measurement getSingleRecordedMetric(Function<String, List<Measurement>> metricGetter, String name) { final List<Measurement> measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); @@ -470,4 +688,47 @@ private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return Math.abs(expected - actual) < eps; } + + private static IndexRequest indexRequest(String index) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(index); + indexRequest.source(Map.of("field", randomAlphaOfLength(10))); + return indexRequest; + } + + private static void add512BRequests(ArrayList<DocWriteRequest<?>> requests, String index) { + long total = 0; + while (total < 512) { + IndexRequest indexRequest = indexRequest(index); + requests.add(indexRequest); + total += indexRequest.ramBytesUsed(); + } + 
assertThat(total, lessThan(1024L)); + } + + private static void blockWritePool(ThreadPool threadPool, CountDownLatch finishLatch) { + final var threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); + final var startBarrier = new CyclicBarrier(threadCount + 1); + final var blockingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + safeAwait(startBarrier); + safeAwait(finishLatch); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + for (int i = 0; i < threadCount; i++) { + threadPool.executor(ThreadPool.Names.WRITE).execute(blockingTask); + } + safeAwait(startBarrier); + } } diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 32198ba7584be..3c5c365654206 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.index.Index; import org.elasticsearch.index.mapper.DocumentParsingException; @@ -611,23 +613,31 @@ protected static void generateThrowableXContent(XContentBuilder builder, Params */ public static XContentBuilder generateFailureXContent(XContentBuilder builder, Params params, @Nullable Exception e, boolean detailed) throws IOException { - // No exception to render as an error + if (builder.getRestApiVersion() == RestApiVersion.V_8) { + if (e == null) { + return builder.field(ERROR, "unknown"); + } + if (detailed == false) { + return generateNonDetailedFailureXContentV8(builder, e); + } + // else fallthrough + } + if (e == null) { - return builder.field(ERROR, "unknown"); + // No exception to render as an error + builder.startObject(ERROR); + builder.field(TYPE, "unknown"); + builder.field(REASON, "unknown"); + return builder.endObject(); } - // Render the exception with a simple message if (detailed == false) { - String message = "No ElasticsearchException found"; - Throwable t = e; - for (int counter = 0; counter < 10 && t != null; counter++) { - if (t instanceof ElasticsearchException) { - message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]"; - break; - } - t = t.getCause(); - } - return builder.field(ERROR, message); + // just render the type & message + Throwable t = ExceptionsHelper.unwrapCause(e); + builder.startObject(ERROR); + builder.field(TYPE, getExceptionName(t)); + builder.field(REASON, t.getMessage()); + return builder.endObject(); } // Render the exception with all details @@ -646,6 +656,20 @@ public static XContentBuilder generateFailureXContent(XContentBuilder builder, P return builder.endObject(); } + @UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // remove V8 API + private static XContentBuilder generateNonDetailedFailureXContentV8(XContentBuilder builder, @Nullable Exception e) throws IOException { + String message = "No ElasticsearchException found"; + Throwable t = e; + for (int counter = 0; counter < 10 && t != null; counter++) { + if (t instanceof ElasticsearchException) { + message = 
t.getClass().getSimpleName() + "[" + t.getMessage() + "]"; + break; + } + t = t.getCause(); + } + return builder.field(ERROR, message); + } + /** * Parses the output of {@link #generateFailureXContent(XContentBuilder, Params, Exception, boolean)} */ @@ -729,8 +753,8 @@ public static String getExceptionName(Throwable ex) { static String buildMessage(String type, String reason, String stack) { StringBuilder message = new StringBuilder("Elasticsearch exception ["); - message.append(TYPE).append('=').append(type).append(", "); - message.append(REASON).append('=').append(reason); + message.append(TYPE).append('=').append(type); + message.append(", ").append(REASON).append('=').append(reason); if (stack != null) { message.append(", ").append(STACK_TRACE).append('=').append(stack); } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index aadfffb562558..688d2aaf905a6 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -201,6 +201,11 @@ static TransportVersion def(int id) { public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_00_0); public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_00_0); + public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); + public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); + public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); + public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 7791ca200a785..40071b19af5d3 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -190,6 +190,7 @@ public class Version implements VersionId<Version>, ToXContentFragment { public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java index bed2815f5a895..a0948af88e2f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportUpdateDesiredNodesAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.desirednodes.VersionConflictException; -import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodes; import org.elasticsearch.cluster.metadata.DesiredNodesMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -99,22 +98,6 @@ protected void masterOperation( ); } - @Override - protected void doExecute(Task task, UpdateDesiredNodesRequest request, ActionListener<UpdateDesiredNodesResponse> listener) { - if (request.clusterHasRequiredFeatures(nf -> featureService.clusterHasFeature(clusterService.state(), nf)) == false) { - listener.onFailure( - new IllegalArgumentException( - "Unable to use processor ranges, floating-point (with greater precision) processors " + "in mixed-clusters with nodes that do not support feature " + DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED.id() - ) - ); - return; - } - - super.doExecute(task, request, listener); - } - static ClusterState replaceDesiredNodes(ClusterState clusterState, DesiredNodes newDesiredNodes) { return clusterState.copyAndUpdateMetadata( metadata -> metadata.putCustom(DesiredNodesMetadata.TYPE, new DesiredNodesMetadata(newDesiredNodes)) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index a94401fdd66f3..21b714b105b59 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -26,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import 
java.util.function.Predicate; public class UpdateDesiredNodesRequest extends AcknowledgedRequest<UpdateDesiredNodesRequest> { private static final TransportVersion DRY_RUN_VERSION = TransportVersions.V_8_4_0; @@ -117,11 +115,6 @@ public boolean isDryRun() { return dryRun; } - public boolean clusterHasRequiredFeatures(Predicate<NodeFeature> clusterHasFeature) { - return clusterHasFeature.test(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED) - || nodes.stream().allMatch(n -> n.clusterHasRequiredFeatures(clusterHasFeature)); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java index 83d1356e5ef62..d20eee96809e8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/features/TransportNodesFeaturesAction.java @@ -16,7 +16,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,8 +27,7 @@ import java.io.IOException; import java.util.List; -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) -// @UpdateForV10 // this can be removed in v10. It may be called by v8 nodes to v9 nodes. +@UpdateForV10(owner = UpdateForV10.Owner.CORE_INFRA) // this can be removed in v10. It may be called by v8 nodes to v9 nodes. 
public class TransportNodesFeaturesAction extends TransportNodesAction< NodesFeaturesRequest, NodesFeaturesResponse, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c1f867c247345..5c4be62723e07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -793,6 +793,8 @@ static class IndexPressureStats implements ToXContentFragment { long currentCoordinatingOps = 0; long currentPrimaryOps = 0; long currentReplicaOps = 0; + long lowWaterMarkSplits = 0; + long highWaterMarkSplits = 0; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats nodeStatIndexingPressureStats = nodeStat.getIndexingPressureStats(); if (nodeStatIndexingPressureStats != null) { @@ -816,6 +818,8 @@ static class IndexPressureStats implements ToXContentFragment { currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests(); + lowWaterMarkSplits += nodeStatIndexingPressureStats.getLowWaterMarkSplits(); + highWaterMarkSplits += nodeStatIndexingPressureStats.getHighWaterMarkSplits(); } } indexingPressureStats = new IndexingPressureStats( @@ -838,7 +842,9 @@ static class IndexPressureStats implements ToXContentFragment { currentPrimaryOps, currentReplicaOps, primaryDocumentRejections, - totalCoordinatingRequests + totalCoordinatingRequests, + lowWaterMarkSplits, + highWaterMarkSplits ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index d8db2c5e657b4..ce9b48666d6ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -48,9 +48,10 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "", parentTaskId, headers); } - public ClusterStatsRequest asRemoteStats() { - this.remoteStats = true; - return this; + public static ClusterStatsRequest newRemoteClusterStatsRequest() { + final var request = new ClusterStatsRequest(); + request.remoteStats = true; + return request; } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java index 737e83d4b30a1..07d9c11ae4c07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/LongMetric.java @@ -74,6 +74,7 @@ public static LongMetricValue fromStream(StreamInput in) throws IOException { try { // TODO: not sure what is the good value for minBarForHighestToLowestValueRatio here? 
Histogram dh = Histogram.decodeFromCompressedByteBuffer(bb, 1); + dh.setAutoResize(true); return new LongMetricValue(dh); } catch (DataFormatException e) { throw new IOException(e); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 36b018b5002eb..97585ea9a1024 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterSnapshotStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -108,20 +109,19 @@ public class TransportClusterStatsAction extends TransportNodesAction< private final MetadataStatsCache<MappingStats> mappingStatsCache; private final MetadataStatsCache<AnalysisStats> analysisStatsCache; private final RemoteClusterService remoteClusterService; - private final TransportRemoteClusterStatsAction remoteClusterStatsAction; @Inject public TransportClusterStatsAction( ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + Client client, NodeService nodeService, IndicesService indicesService, RepositoriesService repositoriesService, UsageService usageService, ActionFilters actionFilters, - Settings settings, - TransportRemoteClusterStatsAction remoteClusterStatsAction + Settings settings ) { super( TYPE.name(), @@ -141,7 +141,9 @@ public TransportClusterStatsAction( this.analysisStatsCache = new MetadataStatsCache<>(threadPool.getThreadContext(), AnalysisStats::of); this.remoteClusterService = transportService.getRemoteClusterService(); this.settings = settings; - this.remoteClusterStatsAction = remoteClusterStatsAction; + + // register remote-cluster action with transport service only and not as a local-node Action that the Client can invoke + new TransportRemoteClusterStatsAction(client, transportService, actionFilters); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java index 4d57f10807af6..882aaa8b18e15 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java @@ -10,11 +10,11 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,26 +27,26 @@ public class 
TransportRemoteClusterStatsAction extends HandledTransportAction<RemoteClusterStatsRequest, RemoteClusterStatsResponse> { public static final String NAME = "cluster:monitor/stats/remote"; - public static final ActionType<RemoteClusterStatsResponse> TYPE = new ActionType<>(NAME); public static final RemoteClusterActionType<RemoteClusterStatsResponse> REMOTE_TYPE = new RemoteClusterActionType<>( NAME, RemoteClusterStatsResponse::new ); - private final NodeClient client; + + private final Client client; + private final TransportService transportService; @Inject - public TransportRemoteClusterStatsAction(NodeClient client, TransportService transportService, ActionFilters actionFilters) { + public TransportRemoteClusterStatsAction(Client client, TransportService transportService, ActionFilters actionFilters) { super(NAME, transportService, actionFilters, RemoteClusterStatsRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.client = client; + this.transportService = transportService; } @Override protected void doExecute(Task task, RemoteClusterStatsRequest request, ActionListener<RemoteClusterStatsResponse> listener) { - ClusterStatsRequest subRequest = new ClusterStatsRequest().asRemoteStats(); - subRequest.setParentTask(request.getParentTask()); - client.execute( + new ParentTaskAssigningClient(client, transportService.getLocalNode(), task).execute( TransportClusterStatsAction.TYPE, - subRequest, + ClusterStatsRequest.newRemoteClusterStatsRequest(), listener.map( clusterStatsResponse -> new RemoteClusterStatsResponse( clusterStatsResponse.getClusterUUID(), diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 7857e9a22e9b9..cb667400240f0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; @@ -120,27 +119,18 @@ public void onPrimaryOperationComplete( ActionListener<Void> listener ) { assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed"; - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get( - clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings() + UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), + replicaRequest.primaryRefreshResult.generation(), + false + ); + transportService.sendRequest( + transportService.getLocalNode(), + TransportUnpromotableShardRefreshAction.NAME, + unpromotableReplicaRequest, + new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) ); - - // Indices marked with fast refresh do not rely on refreshing the unpromotables - if (fastRefresh) { - listener.onResponse(null); - } else { - UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( - indexShardRoutingTable, - replicaRequest.primaryRefreshResult.primaryTerm(), - replicaRequest.primaryRefreshResult.generation(), - false - ); - 
transportService.sendRequest( - transportService.getLocalNode(), - TransportUnpromotableShardRefreshAction.NAME, - unpromotableReplicaRequest, - new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) - ); - } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 6c24ec2d17604..4458c008babcd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -24,6 +24,9 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; + public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, ActionResponse.Empty> { @@ -73,6 +76,18 @@ protected void unpromotableShardOperation( return; } + // During an upgrade to FAST_REFRESH_RCO_2, we expect search shards to be first upgraded before the primary is upgraded. Thus, + // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already. + // Note that the fast refresh setting is final. + // TODO: remove assertion (ES-9563) + assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false + || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO_2) + : "attempted to refresh a fast refresh search shard " + + shard + + " on transport version " + + transportService.getLocalNodeConnection().getTransportVersion() + + " (before FAST_REFRESH_RCO_2)"; + ActionListener.run(responseListener, listener -> { shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java new file mode 100644 index 0000000000000..814c512c43bec --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class ReindexDataStreamAction extends ActionType { + + public static final ReindexDataStreamAction INSTANCE = new ReindexDataStreamAction(); + public static final String NAME = "indices:admin/data_stream/reindex"; + + public ReindexDataStreamAction() { + super(NAME); + } + + public static class ReindexDataStreamResponse extends ActionResponse implements ToXContentObject { + private final String taskId; + + public ReindexDataStreamResponse(String taskId) { + super(); + this.taskId = taskId; + } + + public ReindexDataStreamResponse(StreamInput in) throws IOException { + super(in); + this.taskId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(taskId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("task", getTaskId()); + builder.endObject(); + return builder; + } + + public String getTaskId() { + return taskId; + } + + @Override + public int hashCode() { + return Objects.hashCode(taskId); + } + + @Override + public boolean equals(Object other) { + return other instanceof ReindexDataStreamResponse && taskId.equals(((ReindexDataStreamResponse) other).taskId); + } + + } + + public static class ReindexDataStreamRequest extends ActionRequest { + private final String sourceDataStream; + + public ReindexDataStreamRequest(String sourceDataStream) { + super(); + this.sourceDataStream = sourceDataStream; + } + + public ReindexDataStreamRequest(StreamInput in) throws IOException { + super(in); + this.sourceDataStream = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(sourceDataStream); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean getShouldStoreResult() { + return true; // do not wait_for_completion + } + + public String getSourceDataStream() { + return sourceDataStream; + } + + @Override + public int hashCode() { + return Objects.hashCode(sourceDataStream); + } + + @Override + public boolean equals(Object other) { + return other instanceof ReindexDataStreamRequest + && sourceDataStream.equals(((ReindexDataStreamRequest) other).sourceDataStream); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 9e535344c9589..fb4b3907d2bfd 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -126,12 +126,10 @@ protected void asyncShardOperation(GetRequest request, ShardId shardId, ActionLi IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // 
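Review note on the new ReindexDataStreamAction file above: the request/response pair follows the usual Writeable contract, i.e. whatever writeTo emits must be read back in the same order by the StreamInput constructor. A minimal round-trip sketch using the standard stream classes (the data stream name is made up):

    import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    // writeTo(): super.writeTo(out) first, then sourceDataStream; mirrored on read.
    ReindexDataStreamRequest original = new ReindexDataStreamRequest("my-data-stream");
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.writeTo(out);
        try (StreamInput in = out.bytes().streamInput()) {
            ReindexDataStreamRequest copy = new ReindexDataStreamRequest(in);
            assert copy.equals(original) : "equality is defined on sourceDataStream only";
        }
    }

Note also that getShouldStoreResult() returning true means the action result is persisted for later retrieval via the task id carried in ReindexDataStreamResponse, rather than the caller blocking on completion.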
TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportGetAction for an index with fast refresh"; handleGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportGetAction only if an index has the fast refresh setting"; if (request.realtime()) { // we are not tied to a refresh cycle here anyway diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 34b3ae50e0b51..633e7ef6793ab 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -124,12 +124,10 @@ protected void asyncShardOperation(MultiGetShardRequest request, ShardId shardId IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); IndexShard indexShard = indexService.getShard(shardId.id()); if (indexShard.routingEntry().isPromotableToPrimary() == false) { - // TODO: Re-evaluate assertion (ES-8227) - // assert indexShard.indexSettings().isFastRefresh() == false - // : "a search shard should not receive a TransportShardMultiGetAction for an index with fast refresh"; handleMultiGetOnUnpromotableShard(request, indexShard, listener); return; } + // TODO: adapt assertion to assert only that it is not stateless (ES-9563) assert DiscoveryNode.isStateless(clusterService.getSettings()) == false || indexShard.indexSettings().isFastRefresh() : "in Stateless a promotable to primary shard can receive a TransportShardMultiGetAction only if an index has " + "the fast refresh setting"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java index 3ed1dfef50053..760b87af49a78 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineResponse.java @@ -80,7 +80,7 @@ public RestStatus status() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); for (PipelineConfiguration pipeline : pipelines) { - builder.field(pipeline.getId(), summary ? Map.of() : pipeline.getConfigAsMap()); + builder.field(pipeline.getId(), summary ? 
Map.of() : pipeline.getConfig()); } builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index c051f0ca7a6f5..09fb70fb06ba4 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -299,7 +299,7 @@ protected void performPhaseOnShard(final int shardIndex, final SearchShardIterat } private void doPerformPhaseOnShard(int shardIndex, SearchShardIterator shardIt, SearchShardTarget shard, Releasable releasable) { - executePhaseOnShard(shardIt, shard, new SearchActionListener<>(shard, shardIndex) { + var shardListener = new SearchActionListener(shard, shardIndex) { @Override public void innerOnResponse(Result result) { try { @@ -315,7 +315,15 @@ public void onFailure(Exception e) { releasable.close(); onShardFailure(shardIndex, shard, shardIt, e); } - }); + }; + final Transport.Connection connection; + try { + connection = getConnection(shard.getClusterAlias(), shard.getNodeId()); + } catch (Exception e) { + shardListener.onFailure(e); + return; + } + executePhaseOnShard(shardIt, connection, shardListener); } private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) { @@ -327,12 +335,12 @@ private void failOnUnavailable(int shardIndex, SearchShardIterator shardIt) { /** * Sends the request to the actual shard. * @param shardIt the shards iterator - * @param shard the shard routing to send the request for + * @param connection to node that the shard is located on * @param listener the listener to notify on response */ protected abstract void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 69ca1569a7c07..25d59a06664da 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -84,16 +84,9 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction @Override protected void executePhaseOnShard( final SearchShardIterator shardIt, - final SearchShardTarget shard, + final Transport.Connection connection, final SearchActionListener listener ) { - final Transport.Connection connection; - try { - connection = getConnection(shard.getClusterAlias(), shard.getNodeId()); - } catch (Exception e) { - listener.onFailure(e); - return; - } getSearchTransport().sendExecuteDfs(connection, buildShardSearchRequest(shardIt, listener.requestIndex), getTask(), listener); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index d91ea85e2fa97..986f7210c0d1b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -79,7 +79,7 @@ protected static void doCheckNoMissingShards( /** * Releases shard targets that are not used in the docsIdsToLoad. 
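Review note on the search hunks above (and the SearchQueryThenFetchAsyncAction and TransportOpenPointInTimeAction hunks that follow): the node-connection lookup moves out of every executePhaseOnShard override into the shared dispatch path, so a failed lookup takes exactly the same onShardFailure route as a failed execution. The shape of the pattern, paraphrasing the diff (names as in the diff, simplified):

    // Resolve the per-shard dependency up front; route acquisition failures
    // through the same listener used for execution failures.
    final Transport.Connection connection;
    try {
        connection = getConnection(shard.getClusterAlias(), shard.getNodeId());
    } catch (Exception e) {
        shardListener.onFailure(e); // identical handling to a shard-level execution error
        return;
    }
    executePhaseOnShard(shardIt, connection, shardListener);

This removes three near-identical try/catch blocks from the subclasses and keeps failure accounting in one place.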
*/ - protected void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction context) { + protected static void releaseIrrelevantSearchContext(SearchPhaseResult searchPhaseResult, AbstractSearchAsyncAction context) { // we only release search context that we did not fetch from, if we are not scrolling // or using a PIT and if it has at least one hit that didn't make it to the global topDocs if (searchPhaseResult == null) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 84e0e2adea612..f75b84abc2f0f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -91,16 +91,9 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction listener ) { - final Transport.Connection connection; - try { - connection = getConnection(shard.getClusterAlias(), shard.getNodeId()); - } catch (Exception e) { - listener.onFailure(e); - return; - } ShardSearchRequest request = rewriteShardSearchRequest(super.buildShardSearchRequest(shardIt, listener.requestIndex)); getSearchTransport().sendExecuteQuery(connection, request, getTask(), listener); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 7ba4a7ce59869..9e60eedbad6a2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -35,7 +35,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; @@ -252,16 +251,9 @@ protected String missingShardsErrorMessage(StringBuilder missingShards) { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener phaseListener ) { - final Transport.Connection connection; - try { - connection = connectionLookup.apply(shardIt.getClusterAlias(), shard.getNodeId()); - } catch (Exception e) { - phaseListener.onFailure(e); - return; - } transportService.sendChildRequest( connection, OPEN_SHARD_READER_CONTEXT_NAME, diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 683c3589c893d..7414aeeb2c405 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -19,7 +19,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; @@ -53,9 +52,7 @@ public 
void refreshShard( case WAIT_UNTIL -> waitUntil(indexShard, location, new ActionListener<>() { @Override public void onResponse(Boolean forced) { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) { + if (location != null && indexShard.routingEntry().isSearchable() == false) { refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout); } else { listener.onResponse(forced); @@ -68,9 +65,7 @@ public void onFailure(Exception e) { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 699198a8e22c2..56d185645e149 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginsLoader; import java.io.PrintStream; @@ -42,6 +43,9 @@ class Bootstrap { // the loaded settings for the node, not valid until after phase 2 of initialization private final SetOnce nodeEnv = new SetOnce<>(); + // loads information about plugins required for entitlements in phase 2, used by plugins service in phase 3 + private final SetOnce pluginsLoader = new SetOnce<>(); + Bootstrap(PrintStream out, PrintStream err, ServerArgs args) { this.out = out; this.err = err; @@ -72,6 +76,14 @@ Environment environment() { return nodeEnv.get(); } + void setPluginsLoader(PluginsLoader pluginsLoader) { + this.pluginsLoader.set(pluginsLoader); + } + + PluginsLoader pluginsLoader() { + return pluginsLoader.get(); + } + void exitWithNodeValidationException(NodeValidationException e) { Logger logger = LogManager.getLogger(Elasticsearch.class); logger.error("node validation exception\n{}", e.getMessage()); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 2a83f749e7d33..95e5b00a2805f 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; @@ -41,6 +42,9 @@ import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.node.Node; import 
org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginBundle; +import org.elasticsearch.plugins.PluginsLoader; +import org.elasticsearch.plugins.PluginsUtils; import java.io.IOException; import java.io.InputStream; @@ -50,8 +54,10 @@ import java.nio.file.Path; import java.security.Permission; import java.security.Security; +import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -199,9 +205,24 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { VectorUtil.class ); + // load the plugin Java modules and layers now for use in entitlements + var pluginsLoader = new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); + bootstrap.setPluginsLoader(pluginsLoader); + if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { logger.info("Bootstrapping Entitlements"); - EntitlementBootstrap.bootstrap(); + + List> pluginData = new ArrayList<>(); + Set moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile()); + for (PluginBundle moduleBundle : moduleBundles) { + pluginData.add(Tuple.tuple(moduleBundle.getDir(), moduleBundle.pluginDescriptor().isModular())); + } + Set pluginBundles = PluginsUtils.getPluginBundles(nodeEnv.pluginsFile()); + for (PluginBundle pluginBundle : pluginBundles) { + pluginData.add(Tuple.tuple(pluginBundle.getDir(), pluginBundle.pluginDescriptor().isModular())); + } + // TODO: add a functor to map module to plugin name + EntitlementBootstrap.bootstrap(pluginData, callerClass -> null); } else { // install SM after natives, shutdown hooks, etc. logger.info("Bootstrapping java SecurityManager"); @@ -244,7 +265,7 @@ private static void ensureInitialized(Class... 
classes) { private static void initPhase3(Bootstrap bootstrap) throws IOException, NodeValidationException { checkLucene(); - Node node = new Node(bootstrap.environment()) { + Node node = new Node(bootstrap.environment(), bootstrap.pluginsLoader()) { @Override protected void validateNodeBeforeAcceptingRequests( final BootstrapContext context, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 8a4464f194fc5..35b6eb1852237 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -246,6 +246,7 @@ public Coordinator( this.joinValidationService = new JoinValidationService( settings, transportService, + namedWriteableRegistry, this::getStateForJoinValidationService, () -> getLastAcceptedState().metadata(), this.onJoinValidators diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java index 7de7fd4d92d1b..9d5d74fa24648 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -106,6 +108,7 @@ public class JoinValidationService { public JoinValidationService( Settings settings, TransportService transportService, + NamedWriteableRegistry namedWriteableRegistry, Supplier clusterStateSupplier, Supplier metadataSupplier, Collection> joinValidators @@ -120,9 +123,9 @@ public JoinValidationService( transportService.registerRequestHandler( JoinValidationService.JOIN_VALIDATE_ACTION_NAME, this.responseExecutor, - ValidateJoinRequest::new, + BytesTransportRequest::new, (request, channel, task) -> { - final var remoteState = request.getOrReadState(); + final var remoteState = readClusterState(namedWriteableRegistry, request); final var remoteMetadata = remoteState.metadata(); final var localMetadata = metadataSupplier.get(); if (localMetadata.clusterUUIDCommitted() && localMetadata.clusterUUID().equals(remoteMetadata.clusterUUID()) == false) { @@ -145,6 +148,20 @@ public JoinValidationService( ); } + private static ClusterState readClusterState(NamedWriteableRegistry namedWriteableRegistry, BytesTransportRequest request) + throws IOException { + try ( + var bytesStreamInput = request.bytes().streamInput(); + var in = new NamedWriteableAwareStreamInput( + CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), + namedWriteableRegistry + ) + ) { + in.setTransportVersion(request.version()); + return ClusterState.readFrom(in, null); + } + } + public void validateJoin(DiscoveryNode discoveryNode, ActionListener listener) { // This node isn't in the cluster yet so ClusterState#getMinTransportVersion() doesn't apply, we must obtain a specific connection // so we can check its transport version to decide how 
to proceed. diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java deleted file mode 100644 index c81e4877196b3..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.cluster.coordination; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; -import org.elasticsearch.transport.TransportRequest; - -import java.io.IOException; - -public class ValidateJoinRequest extends TransportRequest { - private final CheckedSupplier stateSupplier; - private final RefCounted refCounted; - - public ValidateJoinRequest(StreamInput in) throws IOException { - super(in); - // recent versions send a BytesTransportRequest containing a compressed representation of the state - final var bytes = in.readReleasableBytesReference(); - final var version = in.getTransportVersion(); - final var namedWriteableRegistry = in.namedWriteableRegistry(); - this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry); - this.refCounted = bytes; - } - - private static ClusterState readCompressed( - TransportVersion version, - BytesReference bytes, - NamedWriteableRegistry namedWriteableRegistry - ) throws IOException { - try ( - var bytesStreamInput = bytes.streamInput(); - var in = new NamedWriteableAwareStreamInput( - CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), - namedWriteableRegistry - ) - ) { - in.setTransportVersion(version); - return ClusterState.readFrom(in, null); - } - } - - public ValidateJoinRequest(ClusterState state) { - this.stateSupplier = () -> state; - this.refCounted = null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - stateSupplier.get().writeTo(out); - } - - public ClusterState getOrReadState() throws IOException { - return stateSupplier.get(); - } - - @Override - public void incRef() { - if (refCounted != null) { - refCounted.incRef(); - } - } - - @Override - public boolean tryIncRef() { - return refCounted == null || refCounted.tryIncRef(); - } - - @Override - public boolean decRef() { - return refCounted != null && refCounted.decRef(); - } - - @Override - public boolean hasReferences() { - return refCounted == null || refCounted.hasReferences(); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java 
b/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java deleted file mode 100644 index 4d9074be15695..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListener.java +++ /dev/null @@ -1,218 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.cluster.features; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest; -import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse; -import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction; -import org.elasticsearch.client.internal.ClusterAdminClient; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterFeatures; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.cluster.service.MasterServiceTaskQueue; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executor; -import java.util.stream.Collectors; - -@UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed in v9 -public class NodeFeaturesFixupListener implements ClusterStateListener { - - private static final Logger logger = LogManager.getLogger(NodeFeaturesFixupListener.class); - - private static final TimeValue RETRY_TIME = TimeValue.timeValueSeconds(30); - - private final MasterServiceTaskQueue taskQueue; - private final ClusterAdminClient client; - private final Scheduler scheduler; - private final Executor executor; - private final Set pendingNodes = Collections.synchronizedSet(new HashSet<>()); - - public NodeFeaturesFixupListener(ClusterService service, ClusterAdminClient client, ThreadPool threadPool) { - // there tends to be a lot of state operations on an upgrade - this one is not time-critical, - // so use LOW priority. It just needs to be run at some point after upgrade. 
- this( - service.createTaskQueue("fix-node-features", Priority.LOW, new NodesFeaturesUpdater()), - client, - threadPool, - threadPool.executor(ThreadPool.Names.CLUSTER_COORDINATION) - ); - } - - NodeFeaturesFixupListener( - MasterServiceTaskQueue taskQueue, - ClusterAdminClient client, - Scheduler scheduler, - Executor executor - ) { - this.taskQueue = taskQueue; - this.client = client; - this.scheduler = scheduler; - this.executor = executor; - } - - class NodesFeaturesTask implements ClusterStateTaskListener { - private final Map> results; - private final int retryNum; - - NodesFeaturesTask(Map> results, int retryNum) { - this.results = results; - this.retryNum = retryNum; - } - - @Override - public void onFailure(Exception e) { - logger.error("Could not apply features for nodes {} to cluster state", results.keySet(), e); - scheduleRetry(results.keySet(), retryNum); - } - - public Map> results() { - return results; - } - } - - static class NodesFeaturesUpdater implements ClusterStateTaskExecutor { - @Override - public ClusterState execute(BatchExecutionContext context) { - ClusterState.Builder builder = ClusterState.builder(context.initialState()); - var existingFeatures = builder.nodeFeatures(); - - boolean modified = false; - for (var c : context.taskContexts()) { - for (var e : c.getTask().results().entrySet()) { - // double check there are still no features for the node - if (existingFeatures.getOrDefault(e.getKey(), Set.of()).isEmpty()) { - builder.putNodeFeatures(e.getKey(), e.getValue()); - modified = true; - } - } - c.success(() -> {}); - } - return modified ? builder.build() : context.initialState(); - } - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (event.nodesDelta().masterNodeChanged() && event.localNodeMaster()) { - /* - * Execute this if we have just become master. - * Check if there are any nodes that should have features in cluster state, but don't. - * This can happen if the master was upgraded from before 8.13, and one or more non-master nodes - * were already upgraded. They don't re-join the cluster with the new master, so never get their features - * (which the master now understands) added to cluster state. - * So we need to do a separate transport call to get the node features and add them to cluster state. - * We can't use features to determine when this should happen, as the features are incorrect. - * We also can't use transport version, as that is unreliable for upgrades - * from versions before 8.8 (see TransportVersionFixupListener). - * So the only thing we can use is release version. - * This is ok here, as Serverless will never hit this case, so the node feature fetch action will never be called on Serverless. - * This whole class will be removed in ES v9. 
- */ - ClusterFeatures nodeFeatures = event.state().clusterFeatures(); - Set queryNodes = event.state() - .nodes() - .stream() - .filter(n -> n.getVersion().onOrAfter(Version.V_8_15_0)) - .map(DiscoveryNode::getId) - .filter(n -> getNodeFeatures(nodeFeatures, n).isEmpty()) - .collect(Collectors.toSet()); - - if (queryNodes.isEmpty() == false) { - logger.debug("Fetching actual node features for nodes {}", queryNodes); - queryNodesFeatures(queryNodes, 0); - } - } - } - - @SuppressForbidden(reason = "Need to access a specific node's features") - private static Set getNodeFeatures(ClusterFeatures features, String nodeId) { - return features.nodeFeatures().getOrDefault(nodeId, Set.of()); - } - - private void scheduleRetry(Set nodes, int thisRetryNum) { - // just keep retrying until this succeeds - logger.debug("Scheduling retry {} for nodes {}", thisRetryNum + 1, nodes); - scheduler.schedule(() -> queryNodesFeatures(nodes, thisRetryNum + 1), RETRY_TIME, executor); - } - - private void queryNodesFeatures(Set nodes, int retryNum) { - // some might already be in-progress - Set outstandingNodes = Sets.newHashSetWithExpectedSize(nodes.size()); - synchronized (pendingNodes) { - for (String n : nodes) { - if (pendingNodes.add(n)) { - outstandingNodes.add(n); - } - } - } - if (outstandingNodes.isEmpty()) { - // all nodes already have in-progress requests - return; - } - - NodesFeaturesRequest request = new NodesFeaturesRequest(outstandingNodes.toArray(String[]::new)); - client.execute(TransportNodesFeaturesAction.TYPE, request, new ActionListener<>() { - @Override - public void onResponse(NodesFeaturesResponse response) { - pendingNodes.removeAll(outstandingNodes); - handleResponse(response, retryNum); - } - - @Override - public void onFailure(Exception e) { - pendingNodes.removeAll(outstandingNodes); - logger.warn("Could not read features for nodes {}", outstandingNodes, e); - scheduleRetry(outstandingNodes, retryNum); - } - }); - } - - private void handleResponse(NodesFeaturesResponse response, int retryNum) { - if (response.hasFailures()) { - Set failedNodes = new HashSet<>(); - for (FailedNodeException fne : response.failures()) { - logger.warn("Failed to read features from node {}", fne.nodeId(), fne); - failedNodes.add(fne.nodeId()); - } - scheduleRetry(failedNodes, retryNum); - } - // carry on and read what we can - - Map> results = response.getNodes() - .stream() - .collect(Collectors.toUnmodifiableMap(n -> n.getNode().getId(), NodeFeatures::nodeFeatures)); - - if (results.isEmpty() == false) { - taskQueue.submitTask("fix-node-features", new NodesFeaturesTask(results, retryNum), null); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index fb8559b19d81d..de3343c1944c1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -37,7 +37,6 @@ import java.util.Objects; import java.util.Set; import java.util.TreeSet; -import java.util.function.Predicate; import java.util.regex.Pattern; import static java.lang.String.format; @@ -48,7 +47,6 @@ public final class DesiredNode implements Writeable, ToXContentObject, Comparable { public static final NodeFeature RANGE_FLOAT_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.range_float_processors"); - public static final NodeFeature DOUBLE_PROCESSORS_SUPPORTED = new NodeFeature("desired_node.double_processors"); public static 
final NodeFeature DESIRED_NODE_VERSION_DEPRECATED = new NodeFeature("desired_node.version_deprecated"); public static final TransportVersion RANGE_FLOAT_PROCESSORS_SUPPORT_TRANSPORT_VERSION = TransportVersions.V_8_3_0; @@ -348,10 +346,6 @@ public Set getRoles() { return roles; } - public boolean clusterHasRequiredFeatures(Predicate clusterHasFeature) { - return (processorsRange == null && processors.hasDecimals() == false) || clusterHasFeature.test(RANGE_FLOAT_PROCESSORS_SUPPORTED); - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index bf80c38d64a4e..279243eeff7cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -48,17 +48,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; +/** + * This class main focus is to resolve multi-syntax target expressions to resources or concrete indices. This resolution is influenced + * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: + * - Resolve expressions to concrete indices + * - Resolve expressions to data stream names + * - Resolve expressions to resources (meaning indices, data streams and aliases) + * Note: This class is performance sensitive, so we pay extra attention on the data structure usage and we avoid streams and iterators + * when possible in favor of the classic for-i loops. 
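Review note: the class comment above, together with the rewritten resolveExpressionsToResources in the next hunk, collapses the old resolver chain (date math, explicit-name filtering, wildcard expansion) into a single pass. The subtlety worth calling out is the exclusion rule: a leading '-' only means "exclude" once a wildcard has been seen earlier in the list; before that it is a literal name. A toy model of just that rule (not the ES implementation, which avoids streams for performance as the comment above says; streams are used here only for brevity):

    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;
    import java.util.stream.Collectors;

    static Set<String> resolveSketch(List<String> expressions, Set<String> allResources) {
        Set<String> result = new LinkedHashSet<>();
        boolean wildcardSeen = false;
        for (String expression : expressions) {
            boolean isExclusion = wildcardSeen && expression.startsWith("-");
            String base = isExclusion ? expression.substring(1) : expression;
            boolean isWildcard = base.contains("*") || base.contains("?");
            wildcardSeen |= isWildcard;
            Set<String> matches = isWildcard
                ? allResources.stream()
                    .filter(name -> name.matches(base.replace("?", ".").replace("*", ".*")))
                    .collect(Collectors.toSet())
                : Set.of(base);
            if (isExclusion) {
                result.removeAll(matches);
            } else {
                result.addAll(matches);
            }
        }
        return result;
    }

    // resolveSketch(List.of("logs-*", "-logs-old"), Set.of("logs-a", "logs-old")) -> [logs-a]
    // resolveSketch(List.of("-logs-old"), Set.of("logs-old")) -> [-logs-old]  (literal: no wildcard seen yet;
    // in the real code such a literal would then fail the existence check)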
+ */ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); @@ -190,7 +197,7 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) @@ -220,7 +227,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); @@ -236,7 +243,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit ); } } - checkSystemIndexAccess(context, Set.of(ia.getWriteIndex())); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, ia.getWriteIndex()); return ia; } else { throw new IllegalArgumentException( @@ -245,30 +252,110 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { - if (context.getOptions().expandWildcardExpressions() == false) { + /** + * Resolve the expression to the set of indices, aliases, and, optionally, data streams that the expression matches. + * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the + * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. + */ + protected static Collection resolveExpressionsToResources(Context context, String... expressions) { + // If we do not expand wildcards, then empty or _all expression result in an empty list + boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + if (expandWildcards == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); - } else { - return ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ); } } else { if (expressions == null || expressions.length == 0 || expressions.length == 1 && (Metadata.ALL.equals(expressions[0]) || Regex.isMatchAllPattern(expressions[0]))) { return WildcardExpressionResolver.resolveAll(context); + } else if (isNoneExpression(expressions)) { + return List.of(); + } + } + + // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. + Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + ? new LinkedHashSet<>() + : new ArrayList<>(expressions.length); + boolean wildcardSeen = false; + for (int i = 0, n = expressions.length; i < n; i++) { + String originalExpression = expressions[i]; + + // Resolve exclusion, a `-` prefixed expression is an exclusion only if it succeeds a wildcard. + boolean isExclusion = wildcardSeen && originalExpression.startsWith("-"); + String baseExpression = isExclusion ? 
originalExpression.substring(1) : originalExpression; + + // Resolve date math + baseExpression = DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime); + + // Validate base expression + validateResourceExpression(context, baseExpression, expressions); + + // Check if it's wildcard + boolean isWildcard = expandWildcards && WildcardExpressionResolver.isWildcard(originalExpression); + wildcardSeen |= isWildcard; + + if (isWildcard) { + Set matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + + if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { + throw notFoundException(baseExpression); + } + + if (isExclusion) { + resources.removeAll(matchingResources); + } else { + resources.addAll(matchingResources); + } } else { - return WildcardExpressionResolver.resolve( - context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ) - ); + if (isExclusion) { + resources.remove(baseExpression); + } else if (ensureAliasOrIndexExists(context, baseExpression)) { + resources.add(baseExpression); + } + } + } + return resources; + } + + /** + * Validates the requested expression by performing the following checks: + * - Ensure it's not empty + * - Ensure it doesn't start with `_` + * - Ensure it's not a remote expression unless the allow unavailable targets is enabled. + */ + private static void validateResourceExpression(Context context, String current, String[] expressions) { + if (Strings.isEmpty(current)) { + throw notFoundException(current); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. + if (current.charAt(0) == '_') { + throw new InvalidIndexNameException(current, "must not start with '_'."); + } + ensureRemoteExpressionRequireIgnoreUnavailable(context.getOptions(), current, expressions); + } + + /** + * Throws an exception if the expression is a remote expression and we do not allow unavailable targets + */ + private static void ensureRemoteExpressionRequireIgnoreUnavailable(IndicesOptions options, String current, String[] expressions) { + if (options.ignoreUnavailable()) { + return; + } + if (RemoteClusterAware.isRemoteIndexName(current)) { + List crossClusterIndices = new ArrayList<>(); + for (int i = 0; i < expressions.length; i++) { + if (RemoteClusterAware.isRemoteIndexName(expressions[i])) { + crossClusterIndices.add(expressions[i]); + } } + throw new IllegalArgumentException( + "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices + ); } } @@ -341,7 +428,7 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); @@ -395,7 +482,9 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { && context.getOptions().includeFailureIndices()) { // Collect the data streams involved Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); aliasDataStreams.add(indicesLookup.get(index.getName()).getParentDataStream()); } for (DataStream dataStream : aliasDataStreams) { @@ -416,13 +505,16 @@ Index[] concreteIndices(Context context, String... indexExpressions) { if (context.getOptions().allowNoIndices() == false && concreteIndicesResult.isEmpty()) { throw notFoundException(indexExpressions); } - checkSystemIndexAccess(context, concreteIndicesResult); - return concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + Index[] resultArray = concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, resultArray); + return resultArray; } private static void resolveIndicesForDataStream(Context context, DataStream dataStream, Set concreteIndicesResult) { if (shouldIncludeRegularIndices(context.getOptions())) { - for (Index index : dataStream.getIndices()) { + List indices = dataStream.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -431,7 +523,9 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions())) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + List failureIndices = dataStream.getFailureIndices().getIndices(); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -482,64 +576,6 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract return indexAbstraction.getIndices().size() > 1; } - private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == Predicates.always()) { - return; - } - doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); - } - - private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { - final Metadata metadata = context.getState().metadata(); - final List resolvedSystemIndices = new ArrayList<>(); - final List resolvedNetNewSystemIndices = new ArrayList<>(); - final Set resolvedSystemDataStreams = new HashSet<>(); - final SortedMap indicesLookup = metadata.getIndicesLookup(); - boolean matchedIndex = false; - for (Index concreteIndex : concreteIndices) { - IndexMetadata idxMetadata = metadata.index(concreteIndex); - String name = concreteIndex.getName(); - if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { - matchedIndex = true; - IndexAbstraction indexAbstraction = indicesLookup.get(name); - if (indexAbstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(name)) { - 
resolvedNetNewSystemIndices.add(name); - } else { - resolvedSystemIndices.add(name); - } - } - } - if (matchedIndex) { - handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); - } - } - - private void handleMatchedSystemIndices( - List resolvedSystemIndices, - Set resolvedSystemDataStreams, - List resolvedNetNewSystemIndices - ) { - if (resolvedSystemIndices.isEmpty() == false) { - Collections.sort(resolvedSystemIndices); - deprecationLogger.warn( - DeprecationCategory.API, - "open_system_index_access", - "this request accesses system indices: {}, but in a future major version, direct access to system " - + "indices will be prevented by default", - resolvedSystemIndices - ); - } - if (resolvedSystemDataStreams.isEmpty() == false) { - throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); - } - if (resolvedNetNewSystemIndices.isEmpty() == false) { - throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); - } - } - private static IndexNotFoundException notFoundException(String... indexExpressions) { final IndexNotFoundException infe; if (indexExpressions == null @@ -568,16 +604,16 @@ private static IndexNotFoundException notFoundException(String... indexExpressio } private static boolean shouldTrackConcreteIndex(Context context, Index index) { - if (context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY - && context.netNewSystemIndexPredicate.test(index.getName())) { + if (SystemResourceAccess.isNetNewInBackwardCompatibleMode(context, index)) { // Exclude this one as it's a net-new system index, and we explicitly don't want those. return false; } + IndicesOptions options = context.getOptions(); if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) { DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { if (parentDataStream.isFailureStoreIndex(index.getName())) { - if (context.options.ignoreUnavailable()) { + if (options.ignoreUnavailable()) { return false; } else { throw new FailureIndexNotSupportedException(index); @@ -587,7 +623,6 @@ private static boolean shouldTrackConcreteIndex(Context context, Index index) { } final IndexMetadata imd = context.state.metadata().index(index); if (imd.getState() == IndexMetadata.State.CLOSE) { - IndicesOptions options = context.options; if (options.forbidClosedIndices() && options.ignoreUnavailable() == false) { throw new IndexClosedException(index); } else { @@ -721,21 +756,6 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) return state.metadata().hasIndexAbstraction(resolvedAliasOrIndex); } - /** - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression) { - return DateMathExpressionResolver.resolveExpression(dateExpression); - } - - /** - * @param time instant to consider when parsing the expression - * @return If the specified string is data math expression then this method returns the resolved expression. 
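Review note: the two static resolveDateMathExpression convenience methods above are removed, but date math itself still runs inside the new resolution loop (DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime) in the earlier hunk). For reference, the behaviour being preserved: expressions wrapped in angle brackets are evaluated against a fixed point in time, with optional arithmetic, rounding, and output format. Illustrative values, assuming a start time of 2024-11-15T10:00:00Z:

    //   <logs-{now/d}>            -> logs-2024.11.15   (round down to the day, default yyyy.MM.dd)
    //   <logs-{now-1d/d}>         -> logs-2024.11.14   (arithmetic, then rounding)
    //   <logs-{now/M{yyyy.MM}}>   -> logs-2024.11      (explicit output format)
    String concrete = DateMathExpressionResolver.resolveExpression("<logs-{now/d}>", () -> startTimeMillis);

where startTimeMillis is whatever the caller fixes as "now" (the Context's start time in this class).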
- */ - public static String resolveDateMathExpression(String dateExpression, long time) { - return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); - } - /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ @@ -765,7 +785,8 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); + // unmodifiable without creating a new collection as it might contain many items + Collection resolved = resolveExpressionsToResources(context, expressions); if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items return Collections.unmodifiableSet((Set) resolved); @@ -779,7 +800,7 @@ public Set resolveExpressions( * given index. *
<p>
Only aliases with filters are returned. If the indices list contains a non-filtering reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. + * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); @@ -799,7 +820,8 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * Iterates through the list of indices and selects the effective list of required aliases for the given index. *
<p>
Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - *
<p>
NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. + *
<p>
NOTE: the provided expressions must have been resolved already via + * {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] indexAliases( ClusterState state, @@ -878,7 +900,8 @@ public String[] indexAliases( .toArray(AliasMetadata[]::new); } List aliases = null; - for (AliasMetadata aliasMetadata : aliasCandidates) { + for (int i = 0; i < aliasCandidates.length; i++) { + AliasMetadata aliasMetadata = aliasCandidates[i]; if (requiredAlias.test(aliasMetadata)) { // If required - add it to the list of aliases if (aliases == null) { @@ -914,7 +937,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); // TODO: it appears that this can never be true? if (isAllIndices(resolvedExpressions)) { @@ -932,7 +955,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab for (String expression : resolvedExpressions) { IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { - for (Index index : indexAbstraction.getIndices()) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); String concreteIndex = index.getName(); if (norouting.contains(concreteIndex) == false) { AliasMetadata aliasMetadata = state.metadata().index(concreteIndex).getAliases().get(indexAbstraction.getName()); @@ -961,7 +985,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab continue; } if (dataStream.getIndices() != null) { - for (Index index : dataStream.getIndices()) { + for (int i = 0, n = dataStream.getIndices().size(); i < n; i++) { + Index index = dataStream.getIndices().get(i); String concreteIndex = index.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1006,8 +1031,8 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m Set r = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); Map> routings = new HashMap<>(); String[] concreteIndices = metadata.getConcreteAllIndices(); - for (String index : concreteIndices) { - routings.put(index, r); + for (int i = 0; i < concreteIndices.length; i++) { + routings.put(concreteIndices[i], r); } return routings; } @@ -1036,6 +1061,16 @@ static boolean isExplicitAllPattern(Collection aliasesOrIndices) { return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next()); } + /** + * Identifies if this expression list is *,-* which effectively means a request that requests no indices. + */ + static boolean isNoneExpression(String[] expressions) { + return expressions.length == 2 && "*".equals(expressions[0]) && "-*".equals(expressions[1]); + } + + /** + * @return the system access level that will be applied in this resolution. See {@link SystemIndexAccessLevel} for details. 
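Review note on isNoneExpression above: "*,-*" is the conventional "match nothing" expression list (for example, what a security filter substitutes when the caller is authorized for no indices at all), and it now short-circuits to an empty result before any wildcard machinery runs. Per the implementation shown, the check is exact:

    assert isNoneExpression(new String[] { "*", "-*" });                    // the canonical none-expression
    assert isNoneExpression(new String[] { "*", "-*", "idx" }) == false;    // length must be exactly 2
    assert isNoneExpression(new String[] { "-*" }) == false;                // must be the exact pair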
+ */ public SystemIndexAccessLevel getSystemIndexAccessLevel() { final SystemIndexAccessLevel accessLevel = SystemIndices.getSystemIndexAccessLevel(threadContext); assert accessLevel != SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY @@ -1043,6 +1078,14 @@ public SystemIndexAccessLevel getSystemIndexAccessLevel() { return accessLevel; } + /** + * Determines the right predicate based on the {@link IndexNameExpressionResolver#getSystemIndexAccessLevel()}. Specifically: + * - NONE implies no access to net-new system indices and data streams + * - BACKWARDS_COMPATIBLE_ONLY allows access also to net-new system resources + * - ALL allows access to everything + * - otherwise we fall back to {@link SystemIndices#getProductSystemIndexNamePredicate(ThreadContext)} + * @return the predicate that defines the access to system indices. + */ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; @@ -1067,6 +1110,43 @@ public Predicate getNetNewSystemIndexPredicate() { return systemIndices::isNetNewSystemIndex; } + /** + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * exception. + */ + @Nullable + private static boolean ensureAliasOrIndexExists(Context context, String name) { + boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); + IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); + if (indexAbstraction == null) { + if (ignoreUnavailable) { + return false; + } else { + throw notFoundException(name); + } + } + // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) + if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { + if (ignoreUnavailable) { + return false; + } else { + throw aliasesNotSupportedException(name); + } + } + if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { + if (ignoreUnavailable) { + return false; + } else { + IndexNotFoundException infe = notFoundException(name); + // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. + infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); + throw infe; + } + } + return true; + } + public static class Context { private final ClusterState state; @@ -1242,7 +1322,7 @@ public Predicate getSystemIndexAccessPredicate() { } /** - * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases + * Resolves name expressions with wildcards into the corresponding concrete indices/aliases/data streams */ static final class WildcardExpressionResolver { @@ -1251,8 +1331,8 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. - * Depending on the context, returns the names of the datastreams themselves or their backing indices. + * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. + * Depending on the context, returns the names of the data streams themselves or their backing indices. 
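Review note on ensureAliasOrIndexExists above: it folds the old ExplicitResourceNameFilter behaviour into one three-way outcome per concrete name. Summarized from the hunk (true = keep the name, false = drop it silently, otherwise throw):

    // name exists and is usable in this context             -> true
    // name missing,      ignore_unavailable == true         -> false
    // name missing,      ignore_unavailable == false        -> IndexNotFoundException
    // name is an alias,  options.ignoreAliases() == true    -> false, or aliasesNotSupportedException
    // name is data-stream related but the context excludes
    // data streams                                          -> false, or IndexNotFoundException
    //                                                          tagged with EXCLUDED_DATA_STREAMS_KEY

One small nit: the @Nullable annotation on this method looks like a leftover, since a primitive boolean return can never be null.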
*/ public static Collection resolveAll(Context context) { List concreteIndices = resolveEmptyOrTrivialWildcard(context); @@ -1261,16 +1341,17 @@ public static Collection resolveAll(Context context) { return concreteIndices; } - Stream ias = context.getState() + Set resolved = new HashSet<>(concreteIndices.size()); + context.getState() .metadata() .getIndicesLookup() .values() .stream() .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) - .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())) + .forEach(ia -> resolved.addAll(expandToOpenClosed(context, ia))); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1283,73 +1364,6 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } - /** - * Returns all the existing resource (index, alias and datastream) names that the {@param expressions} list resolves to. - * The passed-in {@param expressions} can contain wildcards and exclusions, as well as plain resource names. - *
- * The return is a {@code Collection} (usually a {@code Set} but can also be a {@code List}, for performance reasons) of plain
- * resource names only. All the returned resources are "accessible", in the given context, i.e. the resources exist
- * and are not an alias or a datastream if the context does not permit it.
- * Wildcard expressions, depending on the context:
- * <ol>
- *   <li>might throw an exception if they don't resolve to anything</li>
- *   <li>might not resolve to hidden or system resources (but plain names can refer to hidden or system resources)</li>
- *   <li>might resolve to aliases and datastreams, and it could be (depending on the context) that their backing indices are what's
- *   ultimately returned, instead of the alias or datastream name</li>
- * </ol>
- */ - public static Collection resolve(Context context, List expressions) { - // fast exit if there are no wildcards to evaluate - if (context.getOptions().expandWildcardExpressions() == false) { - return expressions; - } - int firstWildcardIndex = 0; - for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { - String expression = expressions.get(firstWildcardIndex); - if (isWildcard(expression)) { - break; - } - } - if (firstWildcardIndex == expressions.size()) { - return expressions; - } - Set result = new HashSet<>(); - for (int i = 0; i < firstWildcardIndex; i++) { - result.add(expressions.get(i)); - } - AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); - for (int i = firstWildcardIndex; i < expressions.size(); i++) { - String expression = expressions.get(i); - boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; - if (i == firstWildcardIndex || isWildcard(expression)) { - Stream matchingResources = matchResourcesToWildcard( - context, - isExclusion ? expression.substring(1) : expression - ); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - if (emptyWildcardExpansion != null) { - emptyWildcardExpansion.set(true); - matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); - } - if (isExclusion) { - matchingOpenClosedNames.forEach(result::remove); - } else { - matchingOpenClosedNames.forEach(result::add); - } - if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { - throw notFoundException(expression); - } - } else { - if (isExclusion) { - result.remove(expression.substring(1)); - } else { - result.add(expression); - } - } - } - return result; - } - private static IndexMetadata.State excludeState(IndicesOptions options) { final IndexMetadata.State excludeState; if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { @@ -1366,55 +1380,82 @@ private static IndexMetadata.State excludeState(IndicesOptions options) { } /** - * Given a single wildcard {@param expression}, return the {@code Stream} that contains all the resources (i.e. indices, aliases, - * and datastreams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's + * Given a single wildcard {@param expression}, return a {@code Set} that contains all the resources (i.e. indices, aliases, + * and data streams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's * name matches the {@param expression} wildcard). * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions - * on whether to consider alias, datastream, system, and hidden resources. - * It does NOT consider the open or closed status of index resources. + * on whether to consider alias, data stream, system, and hidden resources. 
*/ - private static Stream matchResourcesToWildcard(Context context, String wildcardExpression) { + static Set matchWildcardToResources(Context context, String wildcardExpression) { assert isWildcard(wildcardExpression); final SortedMap indicesLookup = context.getState().getMetadata().getIndicesLookup(); - Stream matchesStream; + Set matchedResources = new HashSet<>(); + // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" if (Regex.isSuffixMatchPattern(wildcardExpression)) { - // this is an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" - matchesStream = filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values().stream(); - } else { - matchesStream = indicesLookup.values().stream(); - if (Regex.isMatchAllPattern(wildcardExpression) == false) { - matchesStream = matchesStream.filter( - indexAbstraction -> Regex.simpleMatch(wildcardExpression, indexAbstraction.getName()) - ); + for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); + } + return matchedResources; + } + // In case of match all it fetches all index abstractions + if (Regex.isMatchAllPattern(wildcardExpression)) { + for (IndexAbstraction ia : indicesLookup.values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); } + return matchedResources; } - if (context.getOptions().ignoreAliases()) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.getType() != Type.ALIAS); + for (IndexAbstraction indexAbstraction : indicesLookup.values()) { + if (Regex.simpleMatch(wildcardExpression, indexAbstraction.getName())) { + maybeAddToResult(context, wildcardExpression, indexAbstraction, matchedResources); + } } - if (context.includeDataStreams() == false) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isDataStreamRelated() == false); + return matchedResources; + } + + private static void maybeAddToResult( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction, + Set matchedResources + ) { + if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) { + matchedResources.addAll(expandToOpenClosed(context, indexAbstraction)); } - // historic, i.e. not net-new, system indices are included irrespective of the system access predicate - // the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || (indexAbstraction.getType() != Type.DATA_STREAM - && indexAbstraction.getParentDataStream() == null - && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false) - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); + } + + /** + * Checks if this index abstraction should be included because it matched the wildcard expression. 
+ * @param context the options of this request that influence the decision if this index abstraction should be included in the result + * @param wildcardExpression the wildcard expression that matched this index abstraction + * @param indexAbstraction the index abstraction in question + * @return true, if the index abstraction should be included in the result + */ + private static boolean shouldExpandToIndexAbstraction( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction + ) { + if (context.getOptions().ignoreAliases() && indexAbstraction.getType() == Type.ALIAS) { + return false; + } + if (context.includeDataStreams() == false && indexAbstraction.isDataStreamRelated()) { + return false; + } + + if (indexAbstraction.isSystem() + && SystemResourceAccess.shouldExpandToSystemIndexAbstraction(context, indexAbstraction) == false) { + return false; + } + if (context.getOptions().expandWildcardsHidden() == false) { - if (wildcardExpression.startsWith(".")) { - // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also - // starts with "." - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isHidden() == false || indexAbstraction.getName().startsWith(".") - ); - } else { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isHidden() == false); + // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also + // starts with "." + if (indexAbstraction.isHidden() + && (wildcardExpression.startsWith(".") && indexAbstraction.getName().startsWith(".")) == false) { + return false; } } - return matchesStream; + return true; } private static Map filterIndicesLookupForSuffixWildcard( @@ -1430,35 +1471,39 @@ private static Map filterIndicesLookupForSuffixWildcar } /** - * Return the {@code Stream} of open and/or closed index names for the given {@param resources}. + * Return the {@code Set} of open and/or closed index names for the given {@param resources}. * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. 
*/ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Set expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); - return resources.flatMap(indexAbstraction -> { - if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); - } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); - } else { - Stream indicesStateStream = Stream.of(); - if (shouldIncludeRegularIndices(context.getOptions())) { - indicesStateStream = indexAbstraction.getIndices().stream().map(context.state.metadata()::index); - } - if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { - DataStream dataStream = (DataStream) indexAbstraction; - indicesStateStream = Stream.concat( - indicesStateStream, - dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) - ); + Set resources = new HashSet<>(); + if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { + resources.add(indexAbstraction.getName()); + } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { + resources.add(indexAbstraction.getName()); + } else { + if (shouldIncludeRegularIndices(context.getOptions())) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - if (excludeState != null) { - indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); + } + if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { + DataStream dataStream = (DataStream) indexAbstraction; + for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { + Index index = dataStream.getFailureIndices().getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } - }); + } + return resources; } private static List resolveEmptyOrTrivialWildcard(Context context) { @@ -1471,26 +1516,26 @@ private static List resolveEmptyOrTrivialWildcard(Context context) { } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { - return Arrays.stream(allIndices).filter(name -> { - if (name.startsWith(".")) { - IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); - assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; - if (abstraction.isSystem()) { - if (context.netNewSystemIndexPredicate.test(name)) { - if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { - return false; - } else { - return context.systemIndexAccessPredicate.test(name); - } - } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { - return context.systemIndexAccessPredicate.test(name); - } - } else { - return true; - } + List filteredIndices = new 
ArrayList<>(allIndices.length);
+        for (int i = 0; i < allIndices.length; i++) {
+            if (shouldIncludeIndexAbstraction(context, allIndices[i])) {
+                filteredIndices.add(allIndices[i]);
            }
+        }
+        return filteredIndices;
+    }
+
+    private static boolean shouldIncludeIndexAbstraction(Context context, String name) {
+        if (name.startsWith(".") == false) {
            return true;
-        }).toList();
+        }
+
+        IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name);
+        assert abstraction != null : "null abstraction for " + name + " but was in array of all indices";
+        if (abstraction.isSystem() == false) {
+            return true;
+        }
+        return SystemResourceAccess.isSystemIndexAbstractionAccessible(context, abstraction);
    }

    private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions options, Metadata metadata) {
@@ -1513,8 +1558,39 @@ private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions
            return Strings.EMPTY_ARRAY;
        }
    }
+
+    static boolean isWildcard(String expression) {
+        return Regex.isSimpleMatchPattern(expression);
+    }
+
+    static boolean hasWildcards(String[] expressions) {
+        for (int i = 0; i < expressions.length; i++) {
+            if (isWildcard(expressions[i])) {
+                return true;
+            }
+        }
+        return false;
+    }
+    }
+
+    /**
+     * @return If the specified string is a date math expression then this method returns the resolved expression.
+     */
+    public static String resolveDateMathExpression(String dateExpression) {
+        return DateMathExpressionResolver.resolveExpression(dateExpression);
+    }
+
+    /**
+     * @param time instant to consider when parsing the expression
+     * @return If the specified string is a date math expression then this method returns the resolved expression.
+     */
+    public static String resolveDateMathExpression(String dateExpression, long time) {
+        return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time);
    }

+    /**
+     * Resolves a date math expression based on the requested time.
+     */
    public static final class DateMathExpressionResolver {

        private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd");
@@ -1530,35 +1606,18 @@ private DateMathExpressionResolver() {
        }

        /**
-         * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying.
-         * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects.
+         * Resolves a date math expression using the current time. This method recognises a date math expression iff it starts with
+         * %3C and ends with %3E. Otherwise, it returns the expression intact.
         */
-        public static List resolve(Context context, List expressions) {
-            boolean wildcardSeen = false;
-            final boolean expandWildcards = context.getOptions().expandWildcardExpressions();
-            String[] result = null;
-            for (int i = 0, n = expressions.size(); i < n; i++) {
-                String expression = expressions.get(i);
-                // accepts date-math exclusions that are of the form "-<...{}>", i.e. the "-" is outside the "<>" date-math template
-                boolean isExclusion = wildcardSeen && expression.startsWith("-");
-                wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression));
-                String toResolve = isExclusion ? expression.substring(1) : expression;
-                String resolved = resolveExpression(toResolve, context::getStartTime);
-                if (toResolve != resolved) {
-                    if (result == null) {
-                        result = expressions.toArray(Strings.EMPTY_ARRAY);
-                    }
-                    result[i] = isExclusion ? "-" + resolved : resolved;
-                }
-            }
-            return result == null ? expressions : Arrays.asList(result);
-        }
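As a usage sketch of the public date math helpers added above (the index name, timestamp, and printed output are illustrative assumptions, not part of this change):

    import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;

    class DateMathExample {
        public static void main(String[] args) {
            // An arbitrary illustrative instant: 2023-11-14T22:13:20Z.
            long fixedTime = 1_700_000_000_000L;
            // Date math expressions are wrapped in '<' and '>' (URL-encoded as %3C and %3E in REST requests);
            // with the default "uuuu.MM.dd" formatter this resolves to something like "logs-2023.11.14".
            String resolved = IndexNameExpressionResolver.resolveDateMathExpression("<logs-{now/d}>", fixedTime);
            // Anything that is not wrapped in '<' and '>' is returned unchanged.
            String plain = IndexNameExpressionResolver.resolveDateMathExpression("plain-index");
            System.out.println(resolved + " " + plain);
        }
    }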
-
-        static String resolveExpression(String expression) {
+    public static String resolveExpression(String expression) {
        return resolveExpression(expression, System::currentTimeMillis);
    }

-        static String resolveExpression(String expression, LongSupplier getTime) {
+    /**
+     * Resolves a date math expression using the provided time. This method recognises a date math expression iff it starts with
+     * %3C and ends with %3E. Otherwise, it returns the expression intact.
+     */
+    public static String resolveExpression(String expression, LongSupplier getTime) {
        if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) {
            return expression;
        }
@@ -1707,135 +1766,133 @@ private static String doResolveExpression(String expression, LongSupplier getTim
        }
    }

-    public static final class ExplicitResourceNameFilter {
+    /**
+     * In this class we collect the system access relevant code. The helper methods provide the following functionalities:
+     * - determining the access to a system index abstraction
+     * - verifying the access to system abstractions and adding the necessary warnings
+     * - determining the access to a system index based on its name
+     * WARNING: we have observed differences in how the access is determined. For now this behaviour is documented and preserved.
+     */
+    public static final class SystemResourceAccess {

-        private ExplicitResourceNameFilter() {
+        private SystemResourceAccess() {
            // Utility class
        }

        /**
-         * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out.
-         * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in.
+         * Checks if this system index abstraction should be included when resolving via {@link
+         * IndexNameExpressionResolver.WildcardExpressionResolver#resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context, String[])}.
+         * NOTE: it behaves differently than {@link SystemResourceAccess#shouldExpandToSystemIndexAbstraction(Context, IndexAbstraction)}
+         * because in the case that the access level is BACKWARDS_COMPATIBLE_ONLY it does not include the net-new indices; this is
+         * questionable.
         */
-        public static List filterUnavailable(Context context, List expressions) {
-            ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions);
-            final boolean expandWildcards = context.getOptions().expandWildcardExpressions();
-            boolean wildcardSeen = false;
-            List result = null;
-            for (int i = 0; i < expressions.size(); i++) {
-                String expression = expressions.get(i);
-                if (Strings.isEmpty(expression)) {
-                    throw notFoundException(expression);
-                }
-                // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API
-                // does not exist and the path is interpreted as an expression. If the expression begins with an underscore,
-                // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown
-                // if the expression can't be found.
-                if (expression.charAt(0) == '_') {
-                    throw new InvalidIndexNameException(expression, "must not start with '_'.");
-                }
-                final boolean isWildcard = expandWildcards && isWildcard(expression);
-                if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) {
-                    if (result != null) {
-                        result.add(expression);
-                    }
+        public static boolean isSystemIndexAbstractionAccessible(Context context, IndexAbstraction abstraction) {
+            assert abstraction.isSystem() : "We should only check this for system resources";
+            if (context.netNewSystemIndexPredicate.test(abstraction.getName())) {
+                if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) {
+                    return false;
                } else {
-                    if (result == null) {
-                        result = new ArrayList<>(expressions.size() - 1);
-                        result.addAll(expressions.subList(0, i));
-                    }
+                    return context.systemIndexAccessPredicate.test(abstraction.getName());
                }
-                wildcardSeen |= isWildcard;
+            } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) {
+                return context.systemIndexAccessPredicate.test(abstraction.getName());
            }
-            return result == null ? expressions : result;
+            return true;
        }
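The decision implemented by isSystemIndexAbstractionAccessible can be condensed into a small self-contained sketch; the local enum and boolean parameters stand in for the context's access level and predicates (a paraphrase for illustration, not the actual wiring):

    class SystemAccessSketch {
        // Mirrors the SystemIndexAccessLevel values referenced in this diff, for illustration only.
        enum AccessLevel { NONE, BACKWARDS_COMPATIBLE_ONLY, RESTRICTED, ALL }

        static boolean accessible(AccessLevel level, boolean netNew, boolean dataStreamRelated, boolean predicateAllows) {
            if (netNew) {
                // net-new system resources are never expanded to in backwards-compatible-only mode
                return level != AccessLevel.BACKWARDS_COMPATIBLE_ONLY && predicateAllows;
            }
            if (dataStreamRelated) {
                // system data streams (and their backing indices) always go through the access predicate
                return predicateAllows;
            }
            // historic system indices are always included here
            return true;
        }
    }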
        /**
-         * This returns `true` if the given {@param name} is of a resource that exists.
-         * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of
-         * exception.
+         * Historic, i.e. not net-new, system indices are included irrespective of the system access predicate;
+         * the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature.
+         * A historic system resource can only be an index since system data streams were added later.
         */
-        @Nullable
-        private static boolean ensureAliasOrIndexExists(Context context, String name) {
-            boolean ignoreUnavailable = context.getOptions().ignoreUnavailable();
-            IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name);
-            if (indexAbstraction == null) {
-                if (ignoreUnavailable) {
-                    return false;
-                } else {
-                    throw notFoundException(name);
-                }
-            }
-            // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api)
-            if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) {
-                if (ignoreUnavailable) {
-                    return false;
-                } else {
-                    throw aliasesNotSupportedException(name);
-                }
-            }
-            if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) {
-                if (ignoreUnavailable) {
-                    return false;
-                } else {
-                    IndexNotFoundException infe = notFoundException(name);
-                    // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded.
-                    infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true");
-                    throw infe;
-                }
-            }
-            return true;
+        private static boolean shouldExpandToSystemIndexAbstraction(Context context, IndexAbstraction indexAbstraction) {
+            assert indexAbstraction.isSystem() : "We should only check this for system resources";
+            boolean isHistoric = indexAbstraction.getType() != Type.DATA_STREAM
+                && indexAbstraction.getParentDataStream() == null
+                && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false;
+            return isHistoric || context.systemIndexAccessPredicate.test(indexAbstraction.getName());
        }

-        private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) {
-            if (options.ignoreUnavailable()) {
+        /**
+         * Checks if any system indices that should not have been accessible according to the
+         * {@link Context#getSystemIndexAccessPredicate()} are accessed, and it performs the following actions:
+         * - if there are historic (aka not net-new) system indices, then it adds a deprecation warning
+         * - if it contains net-new system indices or system data streams, it throws an exception.
+         */
+        private static void checkSystemIndexAccess(Context context, ThreadContext threadContext, Index... concreteIndices) {
+            final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate();
+            if (systemIndexAccessPredicate == Predicates.always()) {
                return;
            }
-            for (String index : indexExpressions) {
-                if (RemoteClusterAware.isRemoteIndexName(index)) {
-                    failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions);
-                }
-            }
+            doCheckSystemIndexAccess(context, systemIndexAccessPredicate, threadContext, concreteIndices);
        }

-        private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) {
-            List crossClusterIndices = new ArrayList<>();
-            for (String index : indexExpressions) {
-                if (RemoteClusterAware.isRemoteIndexName(index)) {
-                    crossClusterIndices.add(index);
+        private static void doCheckSystemIndexAccess(
+            Context context,
+            Predicate systemIndexAccessPredicate,
+            ThreadContext threadContext,
+            Index... concreteIndices
+        ) {
+            final Metadata metadata = context.getState().metadata();
+            final List resolvedSystemIndices = new ArrayList<>();
+            final List resolvedNetNewSystemIndices = new ArrayList<>();
+            final Set resolvedSystemDataStreams = new HashSet<>();
+            final SortedMap indicesLookup = metadata.getIndicesLookup();
+            boolean matchedIndex = false;
+            for (int i = 0; i < concreteIndices.length; i++) {
+                Index concreteIndex = concreteIndices[i];
+                IndexMetadata idxMetadata = metadata.index(concreteIndex);
+                String name = concreteIndex.getName();
+                if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) {
+                    matchedIndex = true;
+                    IndexAbstraction indexAbstraction = indicesLookup.get(name);
+                    if (indexAbstraction.getParentDataStream() != null) {
+                        resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName());
+                    } else if (context.netNewSystemIndexPredicate.test(name)) {
+                        resolvedNetNewSystemIndices.add(name);
+                    } else {
+                        resolvedSystemIndices.add(name);
+                    }
                }
            }
-            throw new IllegalArgumentException(
-                "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices
-            );
-        }
-    }
-
-    /**
-     * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState}
-     * since it uses only the start time to resolve expressions.
-     */
-    public static final class ResolverContext extends Context {
-        public ResolverContext() {
-            this(System.currentTimeMillis());
-        }
-
-        public ResolverContext(long startTime) {
-            super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never());
+            if (matchedIndex) {
+                handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices, threadContext);
+            }
        }

-        @Override
-        public ClusterState getState() {
-            throw new UnsupportedOperationException("should never be called");
+        private static void handleMatchedSystemIndices(
+            List resolvedSystemIndices,
+            Set resolvedSystemDataStreams,
+            List resolvedNetNewSystemIndices,
+            ThreadContext threadContext
+        ) {
+            if (resolvedSystemIndices.isEmpty() == false) {
+                Collections.sort(resolvedSystemIndices);
+                deprecationLogger.warn(
+                    DeprecationCategory.API,
+                    "open_system_index_access",
+                    "this request accesses system indices: {}, but in a future major version, direct access to system "
+                        + "indices will be prevented by default",
+                    resolvedSystemIndices
+                );
+            }
+            if (resolvedSystemDataStreams.isEmpty() == false) {
+                throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams);
+            }
+            if (resolvedNetNewSystemIndices.isEmpty() == false) {
+                throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices);
+            }
        }

-        @Override
-        public IndicesOptions getOptions() {
-            throw new UnsupportedOperationException("should never be called");
+        /**
+         * Used in {@link IndexNameExpressionResolver#shouldTrackConcreteIndex(Context, Index)} to exclude net-new indices
+         * when we are in the backwards-compatible-only access level.
+         * This also feels questionable.
+         */
+        private static boolean isNetNewInBackwardCompatibleMode(Context context, Index index) {
+            return context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY
+                && context.netNewSystemIndexPredicate.test(index.getName());
        }
    }

-    private static boolean isWildcard(String expression) {
-        return Regex.isSimpleMatchPattern(expression);
-    }
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
index 89f776a7ada0f..49bd38330e3af 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataFeatures.java
@@ -9,24 +9,12 @@

package org.elasticsearch.cluster.metadata;

-import org.elasticsearch.Version;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

-import java.util.Map;
import java.util.Set;

public class MetadataFeatures implements FeatureSpecification {
-    @Override
-    public Map getHistoricalFeatures() {
-        return Map.of(
-            DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED,
-            Version.V_8_3_0,
-            DesiredNode.DOUBLE_PROCESSORS_SUPPORTED,
-            Version.V_8_5_0
-        );
-    }
-
    @Override
    public Set getFeatures() {
        return Set.of(DesiredNode.DESIRED_NODE_VERSION_DEPRECATED);
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
index d6ed28454df96..3878a3329b634 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
+++
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -782,7 +782,7 @@ private void validateUseOfDeprecatedIngestPipelines(String name, IngestMetadata private void emitWarningIfPipelineIsDeprecated(String name, Map pipelines, String pipelineName) { Optional.ofNullable(pipelineName) .map(pipelines::get) - .filter(p -> Boolean.TRUE.equals(p.getConfigAsMap().get("deprecated"))) + .filter(p -> Boolean.TRUE.equals(p.getConfig().get("deprecated"))) .ifPresent( p -> deprecationLogger.warn( DeprecationCategory.TEMPLATES, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index f7812d284f2af..13fc874f52e9f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,6 +32,7 @@ import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -305,8 +306,14 @@ public ShardId shardId(ClusterState clusterState, String index, String id, @Null } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { + // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - return shardRouting.isPromotableToPrimary(); + // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. + if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO_2)) { + return shardRouting.isSearchable(); + } else { + return shardRouting.isPromotableToPrimary(); + } } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java index bfe8a20f18043..72261df658ca1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocator.java @@ -131,6 +131,10 @@ protected void processInput(DesiredBalanceInput desiredBalanceInput) { recordTime( cumulativeComputationTime, + // We set currentDesiredBalance back to INITIAL when the node stands down as master in onNoLongerMaster. + // However, it is possible that we revert the effect here by setting it again since the computation is async + // and does not check whether the node is master. This should have little to no practical impact. But it may + // lead to unexpected behaviours for tests. 
See also https://github.com/elastic/elasticsearch/pull/116904
                () -> setCurrentDesiredBalance(
                    desiredBalanceComputer.compute(
                        getInitialDesiredBalance(),
@@ -213,6 +217,10 @@ public void allocate(RoutingAllocation allocation, ActionListener listener
        queue.add(index, listener);
        desiredBalanceComputation.onNewInput(DesiredBalanceInput.create(index, allocation));

+        if (allocation.routingTable().indicesRouting().isEmpty()) {
+            logger.debug("No eager reconciliation needed for empty routing table");
+            return;
+        }
        // Starts reconciliation towards desired balance that might have not been updated with a recent calculation yet.
        // This is fine as balance should have incremental rather than radical changes.
        // This should speed up achieving the desired balance in cases current state is still different from it (due to THROTTLING).
diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
index 9cfa22d0a3cfb..9464ccbcc7aa3 100644
--- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
+++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java
@@ -20,6 +20,7 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.plugins.internal.XContentParserDecorator;
import org.elasticsearch.xcontent.DeprecationHandler;
@@ -626,7 +627,22 @@ public static BytesReference toXContent(ChunkedToXContent toXContent, XContentTy
     */
    public static BytesReference toXContent(ToXContent toXContent, XContentType xContentType, Params params, boolean humanReadable)
        throws IOException {
-        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+        return toXContent(toXContent, xContentType, RestApiVersion.current(), params, humanReadable);
+    }
+
+    /**
+     * Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided
+     * {@link XContentType}. Wraps the output into a new anonymous object according to the value returned
+     * by the {@link ToXContent#isFragment()} method.
+     */
+    public static BytesReference toXContent(
+        ToXContent toXContent,
+        XContentType xContentType,
+        RestApiVersion restApiVersion,
+        Params params,
+        boolean humanReadable
+    ) throws IOException {
+        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent(), restApiVersion)) {
            builder.humanReadable(humanReadable);
            if (toXContent.isFragment()) {
                builder.startObject();
diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
index 6a553d5dc5440..8c997a9766baa 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java
@@ -11,6 +11,7 @@

import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.CheckedFunction;
@@ -54,7 +55,7 @@ Settings getAdditionalIndexSettings(

    /**
     * Infrastructure class that holds services that can be used by {@link IndexSettingProvider} instances.
*/ - record Parameters(CheckedFunction mapperServiceFactory) { + record Parameters(ClusterService clusterService, CheckedFunction mapperServiceFactory) { } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 5746bea12a2d8..7a5f469a57fa1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -134,6 +134,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); public static final IndexVersion LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT = def(9_001_00_0, Version.LUCENE_10_0_0); public static final IndexVersion TIME_BASED_K_ORDERED_DOC_ID = def(9_002_00_0, Version.LUCENE_10_0_0); + public static final IndexVersion DEPRECATE_SOURCE_MODE_MAPPER = def(9_003_00_0, Version.LUCENE_10_0_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index f80e8a89f5cf2..43ae38fea6018 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -105,6 +105,9 @@ public class IndexingPressure { private final AtomicLong replicaRejections = new AtomicLong(0); private final AtomicLong primaryDocumentRejections = new AtomicLong(0); + private final AtomicLong lowWaterMarkSplits = new AtomicLong(0); + private final AtomicLong highWaterMarkSplits = new AtomicLong(0); + private final long lowWatermark; private final long lowWatermarkSize; private final long highWatermark; @@ -265,11 +268,20 @@ public Releasable markReplicaOperationStarted(int operations, long bytes, boolea public boolean shouldSplitBulk(long size) { long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); - return (currentUsage >= lowWatermark && size >= lowWatermarkSize) || (currentUsage >= highWatermark && size >= highWatermarkSize); + if (currentUsage >= highWatermark && size >= highWatermarkSize) { + highWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to high watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + if (currentUsage >= lowWatermark && size >= lowWatermarkSize) { + lowWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to low watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + return (false); } public IndexingPressureStats stats() { - // TODO: Update stats with new primary/replica/coordinating limits and add throttling stats return new IndexingPressureStats( totalCombinedCoordinatingAndPrimaryBytes.get(), totalCoordinatingBytes.get(), @@ -290,7 +302,9 @@ public IndexingPressureStats stats() { currentPrimaryOps.get(), currentReplicaOps.get(), primaryDocumentRejections.get(), - totalCoordinatingRequests.get() + totalCoordinatingRequests.get(), + lowWaterMarkSplits.get(), + highWaterMarkSplits.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 59607fadc0dd9..33a8487bb33a3 100644 --- 
a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -58,8 +58,6 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; - /** * This is a cache for {@link BitDocIdSet} based filters and is unbounded by size or time. *

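The hunk below narrows eager loading of these cached filters in stateless deployments to search-role nodes only. As a rough sketch of the resulting decision (the method and parameter names here are mine, condensing the diff, not the actual signature):

    // Illustrative condensation of shouldLoadRandomAccessFiltersEagerly after this change.
    static boolean loadEagerly(boolean settingEnabled, boolean isStateless, boolean hasSearchRole) {
        // Stateless: only search nodes load eagerly; otherwise the setting alone decides.
        return isStateless ? (settingEnabled && hasSearchRole) : settingEnabled;
    }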
@@ -105,10 +103,7 @@ static boolean shouldLoadRandomAccessFiltersEagerly(IndexSettings settings) { boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return loadFiltersEagerlySetting - && (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) - || (DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) - && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()))); + return loadFiltersEagerlySetting && DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE); } else { return loadFiltersEagerlySetting; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 9a2c9517dfd05..31aa787c3f758 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -10,16 +10,13 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Iterator; import java.util.Map; -import java.util.Set; import java.util.function.Function; /** @@ -135,8 +132,6 @@ public final MetadataFieldMapper build(MapperBuilderContext context) { return build(); } - private static final Set UNSUPPORTED_PARAMETERS_8_6_0 = Set.of("type", "fields", "copy_to", "boost"); - public final void parseMetadataField(String name, MappingParserContext parserContext, Map fieldNode) { final Parameter[] params = getParameters(); Map> paramsMap = Maps.newHashMapWithExpectedSize(params.length); @@ -149,20 +144,6 @@ public final void parseMetadataField(String name, MappingParserContext parserCon final Object propNode = entry.getValue(); Parameter parameter = paramsMap.get(propName); if (parameter == null) { - if (UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) { - if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_6_0)) { - // silently ignore type, and a few other parameters: sadly we've been doing this for a long time - deprecationLogger.warn( - DeprecationCategory.API, - propName, - "Parameter [{}] has no effect on metadata field [{}] and will be removed in future", - propName, - name - ); - } - iterator.remove(); - continue; - } throw new MapperParsingException("unknown parameter [" + propName + "] on metadata field [" + name + "]"); } parameter.parse(name, parserContext, propNode); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index dd25cd6eb80a3..e5b12f748543f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.settings.Setting; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; public class SourceFieldMapper extends MetadataFieldMapper { public static final NodeFeature SYNTHETIC_SOURCE_FALLBACK = new NodeFeature("mapper.source.synthetic_source_fallback"); @@ -68,6 +70,9 @@ public class SourceFieldMapper extends MetadataFieldMapper { return indexMode.defaultSourceMode().name(); }, "index.mapping.source.mode", value -> {}, Setting.Property.Final, Setting.Property.IndexScope); + public static final String DEPRECATION_WARNING = "Configuring source mode in mappings is deprecated and will be removed " + + "in future versions. Use [index.mapping.source.mode] index setting instead."; + /** The source mode */ public enum Mode { DISABLED, @@ -79,28 +84,32 @@ public enum Mode { null, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper STORED = new SourceFieldMapper( Mode.STORED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper SYNTHETIC = new SourceFieldMapper( Mode.SYNTHETIC, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); private static final SourceFieldMapper DISABLED = new SourceFieldMapper( Mode.DISABLED, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY + Strings.EMPTY_ARRAY, + false ); public static class Defaults { @@ -134,16 +143,7 @@ public static class Builder extends MetadataFieldMapper.Builder { * The default mode for TimeSeries is left empty on purpose, so that mapping printings include the synthetic * source mode. */ - private final Parameter mode = new Parameter<>( - "mode", - true, - () -> null, - (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), - m -> toType(m).enabled.explicit() ? null : toType(m).mode, - (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), - v -> v.toString().toLowerCase(Locale.ROOT) - ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) - .setSerializerCheck((includeDefaults, isConfigured, value) -> value != null); // don't emit if `enabled` is configured + private final Parameter mode; private final Parameter> includes = Parameter.stringArrayParam( "includes", false, @@ -158,15 +158,28 @@ public static class Builder extends MetadataFieldMapper.Builder { private final Settings settings; private final IndexMode indexMode; + private boolean serializeMode; private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams, boolean serializeMode) { super(Defaults.NAME); this.settings = settings; this.indexMode = indexMode; this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.serializeMode = serializeMode; + this.mode = new Parameter<>( + "mode", + true, + () -> null, + (n, c, o) -> Mode.valueOf(o.toString().toUpperCase(Locale.ROOT)), + m -> toType(m).enabled.explicit() ? 
null : toType(m).mode, + (b, n, v) -> b.field(n, v.toString().toLowerCase(Locale.ROOT)), + v -> v.toString().toLowerCase(Locale.ROOT) + ).setMergeValidator((previous, current, conflicts) -> (previous == current) || current != Mode.STORED) + // don't emit if `enabled` is configured + .setSerializerCheck((includeDefaults, isConfigured, value) -> serializeMode && value != null); } public Builder setSynthetic() { @@ -219,21 +232,22 @@ public SourceFieldMapper build() { if (sourceMode == Mode.SYNTHETIC && (includes.getValue().isEmpty() == false || excludes.getValue().isEmpty() == false)) { throw new IllegalArgumentException("filtering the stored _source is incompatible with synthetic source"); } - - SourceFieldMapper sourceFieldMapper; - if (isDefault()) { + if (mode.isConfigured()) { + serializeMode = true; + } + final SourceFieldMapper sourceFieldMapper; + if (isDefault() && sourceMode == null) { // Needed for bwc so that "mode" is not serialized in case of a standard index with stored source. - if (sourceMode == null) { - sourceFieldMapper = DEFAULT; - } else { - sourceFieldMapper = resolveStaticInstance(sourceMode); - } + sourceFieldMapper = DEFAULT; + } else if (isDefault() && serializeMode == false && sourceMode != null) { + sourceFieldMapper = resolveStaticInstance(sourceMode); } else { sourceFieldMapper = new SourceFieldMapper( sourceMode, enabled.get(), includes.getValue().toArray(Strings.EMPTY_ARRAY), - excludes.getValue().toArray(Strings.EMPTY_ARRAY) + excludes.getValue().toArray(Strings.EMPTY_ARRAY), + serializeMode ); } if (indexMode != null) { @@ -283,15 +297,29 @@ private static SourceFieldMapper resolveStaticInstance(final Mode sourceMode) { if (indexMode == IndexMode.STANDARD && settingSourceMode == Mode.STORED) { return DEFAULT; } - - return resolveStaticInstance(settingSourceMode); + if (c.indexVersionCreated().onOrAfter(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER)) { + return resolveStaticInstance(settingSourceMode); + } else { + return new SourceFieldMapper(settingSourceMode, Explicit.IMPLICIT_TRUE, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, true); + } }, c -> new Builder( c.getIndexSettings().getMode(), c.getSettings(), - c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + c.indexVersionCreated().before(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) ) - ); + ) { + @Override + public MetadataFieldMapper.Builder parse(String name, Map node, MappingParserContext parserContext) + throws MapperParsingException { + assert name.equals(SourceFieldMapper.NAME) : name; + if (parserContext.indexVersionCreated().after(IndexVersions.DEPRECATE_SOURCE_MODE_MAPPER) && node.containsKey("mode")) { + deprecationLogger.critical(DeprecationCategory.MAPPINGS, "mapping_source_mode", SourceFieldMapper.DEPRECATION_WARNING); + } + return super.parse(name, node, parserContext); + } + }; static final class SourceFieldType extends MappedFieldType { private final boolean enabled; @@ -330,8 +358,9 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { } } - // nullable for bwc reasons + // nullable for bwc reasons - TODO: fold this into serializeMode private final @Nullable Mode mode; + private final boolean serializeMode; private final Explicit enabled; /** indicates whether the source will always exist and be complete, for use by features like the update API */ @@ -341,7 +370,7 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { private final String[] excludes; 
private final SourceFilter sourceFilter; - private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes) { + private SourceFieldMapper(Mode mode, Explicit enabled, String[] includes, String[] excludes, boolean serializeMode) { super(new SourceFieldType((enabled.explicit() && enabled.value()) || (enabled.explicit() == false && mode != Mode.DISABLED))); this.mode = mode; this.enabled = enabled; @@ -349,6 +378,7 @@ private SourceFieldMapper(Mode mode, Explicit enabled, String[] include this.includes = includes; this.excludes = excludes; this.complete = stored() && sourceFilter == null; + this.serializeMode = serializeMode; } private static SourceFilter buildSourceFilter(String[] includes, String[] excludes) { @@ -419,7 +449,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(null, Settings.EMPTY, false).init(this); + return new Builder(null, Settings.EMPTY, false, serializeMode).init(this); } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index f1081d06d649d..9f6a2be8cdbc7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -45,10 +45,6 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i public static final ParseField LTE_FIELD = new ParseField("lte"); public static final ParseField GTE_FIELD = new ParseField("gte"); - public static final ParseField FROM_FIELD = new ParseField("from").withAllDeprecated(); - public static final ParseField TO_FIELD = new ParseField("to").withAllDeprecated(); - private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower").withAllDeprecated(); - private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper").withAllDeprecated(); public static final ParseField GT_FIELD = new ParseField("gt"); public static final ParseField LT_FIELD = new ParseField("lt"); private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); @@ -367,15 +363,7 @@ public static RangeQueryBuilder fromXContent(XContentParser parser) throws IOExc if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { - if (FROM_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - from = maybeConvertToBytesRef(parser.objectBytes()); - } else if (TO_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - to = maybeConvertToBytesRef(parser.objectBytes()); - } else if (INCLUDE_LOWER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeLower = parser.booleanValue(); - } else if (INCLUDE_UPPER_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - includeUpper = parser.booleanValue(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { boost = parser.floatValue(); } else if (GT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { from = maybeConvertToBytesRef(parser.objectBytes()); diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index b5197274dd519..0a56db56b2c95 100644 --- 
a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -36,6 +36,12 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long primaryDocumentRejections; private final long memoryLimit; + /* Count number of splits due to SPLIT_BULK_LOW_WATERMARK and SPLIT_BULK_HIGH_WATERMARK + These 2 stats are not serialized via X content yet. + */ + private final long lowWaterMarkSplits; + private final long highWaterMarkSplits; + // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; private final long totalCoordinatingRequests; @@ -85,6 +91,14 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { totalCoordinatingRequests = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + lowWaterMarkSplits = in.readVLong(); + highWaterMarkSplits = in.readVLong(); + } else { + lowWaterMarkSplits = -1L; + highWaterMarkSplits = -1L; + } } public IndexingPressureStats( @@ -107,7 +121,9 @@ public IndexingPressureStats( long currentPrimaryOps, long currentReplicaOps, long primaryDocumentRejections, - long totalCoordinatingRequests + long totalCoordinatingRequests, + long lowWaterMarkSplits, + long highWaterMarkSplits ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -131,6 +147,9 @@ public IndexingPressureStats( this.primaryDocumentRejections = primaryDocumentRejections; this.totalCoordinatingRequests = totalCoordinatingRequests; + + this.lowWaterMarkSplits = lowWaterMarkSplits; + this.highWaterMarkSplits = highWaterMarkSplits; } @Override @@ -160,6 +179,11 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(totalCoordinatingRequests); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + out.writeVLong(lowWaterMarkSplits); + out.writeVLong(highWaterMarkSplits); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -242,6 +266,14 @@ public long getTotalCoordinatingRequests() { return totalCoordinatingRequests; } + public long getHighWaterMarkSplits() { + return highWaterMarkSplits; + } + + public long getLowWaterMarkSplits() { + return lowWaterMarkSplits; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index ce61f197b4831..1494d2a46f9d0 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -519,7 +519,7 @@ public static boolean isNoOpPipelineUpdate(ClusterState state, PutPipelineReques && currentIngestMetadata.getPipelines().containsKey(request.getId())) { var pipelineConfig = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); var currentPipeline = currentIngestMetadata.getPipelines().get(request.getId()); - if (currentPipeline.getConfigAsMap().equals(pipelineConfig)) { + if 
(currentPipeline.getConfig().equals(pipelineConfig)) { return true; } } @@ -1292,7 +1292,7 @@ synchronized void innerUpdatePipelines(IngestMetadata newIngestMetadata) { try { Pipeline newPipeline = Pipeline.create( newConfiguration.getId(), - newConfiguration.getConfigAsMap(), + newConfiguration.getConfig(false), processorFactories, scriptService ); @@ -1416,7 +1416,7 @@ public
Collection getPipelineWithProcessorType(Cla public synchronized void reloadPipeline(String id) throws Exception { PipelineHolder holder = pipelines.get(id); - Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfigAsMap(), processorFactories, scriptService); + Pipeline updatedPipeline = Pipeline.create(id, holder.configuration.getConfig(false), processorFactories, scriptService); Map updatedPipelines = new HashMap<>(this.pipelines); updatedPipelines.put(id, new PipelineHolder(holder.configuration, updatedPipeline)); this.pipelines = Map.copyOf(updatedPipelines); diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index 9067cdb2040fd..64142caf4189d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -9,12 +9,14 @@ package org.elasticsearch.ingest; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.ObjectParser; @@ -22,26 +24,32 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; -import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.Objects; /** - * Encapsulates a pipeline's id and configuration as a blob + * Encapsulates a pipeline's id and configuration as a loosely typed map -- see {@link Pipeline} for the + * parsed and processed object(s) that a pipeline configuration will become. This class is used for things + * like keeping track of pipelines in the cluster state (where a pipeline is 'just some json') whereas the + * {@link Pipeline} class is used in the actual processing of ingest documents through pipelines in the + * {@link IngestService}. 
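
Note: the defensive deep copy backing the new map-based config is the standard recursive map/list copy. Below is a minimal self-contained sketch assuming nothing beyond the JDK; the class and method names are illustrative, and the real innerDeepCopy additionally asserts that scalars are String/Number/Boolean/null:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    final class DeepCopySketch {
        // Recursively copy maps and lists, passing scalars through unchanged.
        // LinkedHashMap preserves key order, matching the pipeline config behavior.
        // When unmodifiable is true, every copied container is wrapped so callers
        // cannot mutate the canonical config held in cluster state.
        static Object deepCopy(Object value, boolean unmodifiable) {
            if (value instanceof Map<?, ?> map) {
                Map<Object, Object> copy = new LinkedHashMap<>(map.size());
                for (Map.Entry<?, ?> e : map.entrySet()) {
                    copy.put(deepCopy(e.getKey(), unmodifiable), deepCopy(e.getValue(), unmodifiable));
                }
                return unmodifiable ? Collections.unmodifiableMap(copy) : copy;
            } else if (value instanceof List<?> list) {
                List<Object> copy = new ArrayList<>(list.size());
                for (Object item : list) {
                    copy.add(deepCopy(item, unmodifiable));
                }
                return unmodifiable ? Collections.unmodifiableList(copy) : copy;
            } else {
                return value; // String, Number, Boolean, or null
            }
        }
    }
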
*/ public final class PipelineConfiguration implements SimpleDiffable, ToXContentObject { private static final ObjectParser PARSER = new ObjectParser<>("pipeline_config", true, Builder::new); static { PARSER.declareString(Builder::setId, new ParseField("id")); - PARSER.declareField((parser, builder, aVoid) -> { - XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); - contentBuilder.generator().copyCurrentStructure(parser); - builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType()); - }, new ParseField("config"), ObjectParser.ValueType.OBJECT); - + PARSER.declareField( + (parser, builder, aVoid) -> builder.setConfig(parser.map()), + new ParseField("config"), + ObjectParser.ValueType.OBJECT + ); } public static ContextParser getParser() { @@ -51,56 +59,94 @@ public static ContextParser getParser() { private static class Builder { private String id; - private BytesReference config; - private XContentType xContentType; + private Map config; void setId(String id) { this.id = id; } - void setConfig(BytesReference config, XContentType xContentType) { + void setConfig(Map config) { this.config = config; - this.xContentType = xContentType; } PipelineConfiguration build() { - return new PipelineConfiguration(id, config, xContentType); + return new PipelineConfiguration(id, config); } } private final String id; - // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state - // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options) - // also the get pipeline api just directly returns this to the caller - private final BytesReference config; - private final XContentType xContentType; + private final Map config; - public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) { + public PipelineConfiguration(String id, Map config) { this.id = Objects.requireNonNull(id); - this.config = Objects.requireNonNull(config); - this.xContentType = Objects.requireNonNull(xContentType); + this.config = deepCopy(config, true); // defensive deep copy + } + + /** + * A convenience constructor that parses some bytes as a map representing a pipeline's config and then delegates to the + * conventional {@link #PipelineConfiguration(String, Map)} constructor. 
+ * + * @param id the id of the pipeline + * @param config a parse-able bytes reference that will return a pipeline configuration + * @param xContentType the content-type to use while parsing the pipeline configuration + */ + public PipelineConfiguration(String id, BytesReference config, XContentType xContentType) { + this(id, XContentHelper.convertToMap(config, true, xContentType).v2()); } public String getId() { return id; } - public Map getConfigAsMap() { - return XContentHelper.convertToMap(config, true, xContentType).v2(); + /** + * @return a reference to the unmodifiable configuration map for this pipeline + */ + public Map getConfig() { + return getConfig(true); } - // pkg-private for tests - XContentType getXContentType() { - return xContentType; + /** + * @param unmodifiable whether the returned map should be unmodifiable or not + * @return a reference to the unmodifiable config map (if unmodifiable is true) or + * a reference to a freshly-created mutable deep copy of the config map (if unmodifiable is false) + */ + public Map getConfig(boolean unmodifiable) { + if (unmodifiable) { + return config; // already unmodifiable + } else { + return deepCopy(config, false); + } + } + + @SuppressWarnings("unchecked") + private static T deepCopy(final T value, final boolean unmodifiable) { + return (T) innerDeepCopy(value, unmodifiable); } - // pkg-private for tests - BytesReference getConfig() { - return config; + private static Object innerDeepCopy(final Object value, final boolean unmodifiable) { + if (value instanceof Map mapValue) { + final Map copy = Maps.newLinkedHashMapWithExpectedSize(mapValue.size()); // n.b. maintain ordering + for (Map.Entry entry : mapValue.entrySet()) { + copy.put(innerDeepCopy(entry.getKey(), unmodifiable), innerDeepCopy(entry.getValue(), unmodifiable)); + } + return unmodifiable ? Collections.unmodifiableMap(copy) : copy; + } else if (value instanceof List listValue) { + final List copy = new ArrayList<>(listValue.size()); + for (Object itemValue : listValue) { + copy.add(innerDeepCopy(itemValue, unmodifiable)); + } + return unmodifiable ? 
Collections.unmodifiableList(copy) : copy; + } else { + // if this list of expected value types ends up not being exhaustive, then we want to learn about that + // at development time, but it's probably better to err on the side of passing through the value at runtime + assert (value == null || value instanceof String || value instanceof Number || value instanceof Boolean) + : "unexpected value type [" + value.getClass() + "]"; + return value; + } } public Integer getVersion() { - Object o = getConfigAsMap().get("version"); + Object o = config.get("version"); if (o == null) { return null; } else if (o instanceof Number number) { @@ -114,13 +160,22 @@ public Integer getVersion() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("id", id); - builder.field("config", getConfigAsMap()); + builder.field("config", config); builder.endObject(); return builder; } public static PipelineConfiguration readFrom(StreamInput in) throws IOException { - return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class)); + final String id = in.readString(); + final Map config; + if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + config = in.readGenericMap(); + } else { + final BytesReference bytes = in.readSlicedBytesReference(); + final XContentType type = in.readEnum(XContentType.class); + config = XContentHelper.convertToMap(bytes, true, type).v2(); + } + return new PipelineConfiguration(id, config); } public static Diff readDiffFrom(StreamInput in) throws IOException { @@ -135,8 +190,14 @@ public String toString() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - out.writeBytesReference(config); - XContentHelper.writeTo(out, xContentType); + if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + out.writeGenericMap(config); + } else { + XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint(); + builder.map(config); + out.writeBytesReference(BytesReference.bytes(builder)); + XContentHelper.writeTo(out, XContentType.JSON); + } } @Override @@ -147,14 +208,14 @@ public boolean equals(Object o) { PipelineConfiguration that = (PipelineConfiguration) o; if (id.equals(that.id) == false) return false; - return getConfigAsMap().equals(that.getConfigAsMap()); + return config.equals(that.config); } @Override public int hashCode() { int result = id.hashCode(); - result = 31 * result + getConfigAsMap().hashCode(); + result = 31 * result + config.hashCode(); return result; } @@ -164,7 +225,7 @@ public int hashCode() { *
The given upgrader is applied to the config map for any processor of the given type. */ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.ProcessorConfigUpgrader upgrader) { - Map mutableConfigMap = getConfigAsMap(); + Map mutableConfigMap = getConfig(false); boolean changed = false; // This should be a List of Maps, where the keys are processor types and the values are config maps. // But we'll skip upgrading rather than fail if not. @@ -180,11 +241,7 @@ PipelineConfiguration maybeUpgradeProcessors(String type, IngestMetadata.Process } } if (changed) { - try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { - return new PipelineConfiguration(id, BytesReference.bytes(builder.map(mutableConfigMap)), xContentType); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + return new PipelineConfiguration(id, mutableConfigMap); } else { return this; } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index acc26a42e4745..94395193622e0 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -643,6 +643,34 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.low_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_LOW_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getLowWaterMarkSplits) + .orElse(0L) + ) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.high_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_HIGH_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getHighWaterMarkSplits) + .orElse(0L) + ) + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.flush.total.time", diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index ec4a534fc883b..80c9aafaa84b4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -69,6 +69,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.repositories.RepositoriesService; @@ -180,8 +181,8 @@ public class Node implements Closeable { * * @param environment the initial environment for this node, which will be added to by plugins */ - public Node(Environment environment) { - this(NodeConstruction.prepareConstruction(environment, new NodeServiceProvider(), true)); + public Node(Environment environment, PluginsLoader pluginsLoader) { + this(NodeConstruction.prepareConstruction(environment, pluginsLoader, new NodeServiceProvider(), true)); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 
62f923d673dc7..e1fc586424dec 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -42,7 +42,6 @@ import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.MasterHistoryService; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; -import org.elasticsearch.cluster.features.NodeFeaturesFixupListener; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSettings; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -165,6 +164,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.RecoveryPlannerPlugin; import org.elasticsearch.plugins.ReloadablePlugin; @@ -261,6 +261,7 @@ class NodeConstruction { */ static NodeConstruction prepareConstruction( Environment initialEnvironment, + PluginsLoader pluginsLoader, NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings ) { @@ -268,7 +269,7 @@ static NodeConstruction prepareConstruction( try { NodeConstruction constructor = new NodeConstruction(closeables); - Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); + Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider, pluginsLoader); constructor.loadLoggingDataProviders(); TelemetryProvider telemetryProvider = constructor.createTelemetryProvider(settings); ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); @@ -401,7 +402,7 @@ private static Optional getSinglePlugin(Stream plugins, Class plugi return Optional.of(plugin); } - private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider) { + private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider, PluginsLoader pluginsLoader) { // Pass the node settings to the DeprecationLogger class so that it can have the deprecation.skip_deprecated_settings setting: Settings envSettings = initialEnvironment.settings(); DeprecationLogger.initialize(envSettings); @@ -474,7 +475,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr (e, apmConfig) -> logger.error("failed to delete temporary APM config file [{}], reason: [{}]", apmConfig, e.getMessage()) ); - pluginsService = serviceProvider.newPluginService(initialEnvironment, envSettings); + pluginsService = serviceProvider.newPluginService(initialEnvironment, pluginsLoader); modules.bindToInstance(PluginsService.class, pluginsService); Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), envSettings); @@ -787,7 +788,6 @@ private void construct( if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMappingUpdateService(systemIndices, client)); - clusterService.addListener(new NodeFeaturesFixupListener(clusterService, client.admin().cluster(), threadPool)); } SourceFieldMetrics sourceFieldMetrics = new SourceFieldMetrics( @@ -822,7 +822,7 @@ private void construct( .searchOperationListeners(searchOperationListeners) .build(); - final var parameters = new 
IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); + final var parameters = new IndexSettingProvider.Parameters(clusterService, indicesService::createIndexMapperServiceForValidation); IndexSettingProviders indexSettingProviders = new IndexSettingProviders( Sets.union( builtinIndexSettingProviders(), diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index f18655afb8f02..8f2dc4e532ae0 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -27,6 +27,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.script.ScriptContext; @@ -51,9 +52,9 @@ */ class NodeServiceProvider { - PluginsService newPluginService(Environment environment, Settings settings) { + PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java new file mode 100644 index 0000000000000..6b3eda6c0c9b4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -0,0 +1,461 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
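
Note: with plugin loading split out this way, bootstrap code builds the loader before the node. A hedged sketch of the call sequence implied by the two new constructors; env is an assumed org.elasticsearch.env.Environment obtained earlier during bootstrap:

    // Discover module/plugin bundles and create their class loaders and module
    // layers up front, then hand the result to the node under construction.
    PluginsLoader pluginsLoader = new PluginsLoader(env.modulesFile(), env.pluginsFile());
    Node node = new Node(env, pluginsLoader);
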
+ */ + +package org.elasticsearch.plugins; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.ModuleQualifiedExportsService; + +import java.io.IOException; +import java.lang.ModuleLayer.Controller; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; + +/** + * This class is used to load modules and module layers for each plugin during + * node initialization prior to enablement of entitlements. This allows entitlements + * to have all the plugin information they need prior to starting. + */ +public class PluginsLoader { + + /** + * Contains information about the {@link ClassLoader} required to load a plugin + */ + public interface PluginLayer { + /** + * @return Information about the bundle of jars used in this plugin + */ + PluginBundle pluginBundle(); + + /** + * @return The {@link ClassLoader} used to instantiate the main class for the plugin + */ + ClassLoader pluginClassLoader(); + } + + /** + * Contains information about the {@link ClassLoader}s and {@link ModuleLayer} required for loading a plugin + * @param pluginBundle Information about the bundle of jars used in this plugin + * @param pluginClassLoader The {@link ClassLoader} used to instantiate the main class for the plugin + * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules + * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules + */ + private record LoadedPluginLayer( + PluginBundle pluginBundle, + ClassLoader pluginClassLoader, + ClassLoader spiClassLoader, + ModuleLayer spiModuleLayer + ) implements PluginLayer { + + public LoadedPluginLayer { + Objects.requireNonNull(pluginBundle); + Objects.requireNonNull(pluginClassLoader); + Objects.requireNonNull(spiClassLoader); + Objects.requireNonNull(spiModuleLayer); + } + } + + /** + * Tuple of module layer and loader. + * Modular Plugins have a plugin specific loader and layer. + * Non-Modular plugins have a plugin specific loader and the boot layer. 
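
Note: PluginLayer deliberately exposes only what PluginsService needs to instantiate plugin classes. A sketch of how a consumer might walk the stream; illustrative only:

    // List every loaded plugin with the class loader that will load its main class.
    pluginsLoader.pluginLayers()
        .forEach(layer -> System.out.println(
            layer.pluginBundle().pluginDescriptor().getName() + " -> " + layer.pluginClassLoader()));
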
+ */ + public record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { + + public LayerAndLoader { + Objects.requireNonNull(layer); + Objects.requireNonNull(loader); + } + + public static LayerAndLoader ofLoader(ClassLoader loader) { + return new LayerAndLoader(ModuleLayer.boot(), loader); + } + } + + private static final Logger logger = LogManager.getLogger(PluginsLoader.class); + private static final Module serverModule = PluginsLoader.class.getModule(); + + private final List moduleDescriptors; + private final List pluginDescriptors; + private final Map loadedPluginLayers; + + /** + * Constructs a new PluginsLoader + * + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem + * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + */ + @SuppressWarnings("this-escape") + public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + + Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); + addServerExportsService(qualifiedExports); + + Set seenBundles = new LinkedHashSet<>(); + + // load (elasticsearch) module layers + if (modulesDirectory != null) { + try { + Set modules = PluginsUtils.getModuleBundles(modulesDirectory); + moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(modules); + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize modules", ex); + } + } else { + moduleDescriptors = Collections.emptyList(); + } + + // load plugin layers + if (pluginsDirectory != null) { + try { + // TODO: remove this leniency, but tests bogusly rely on it + if (isAccessibleDirectory(pluginsDirectory, logger)) { + PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); + Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); + pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(plugins); + } else { + pluginDescriptors = Collections.emptyList(); + } + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize plugins", ex); + } + } else { + pluginDescriptors = Collections.emptyList(); + } + + this.loadedPluginLayers = Collections.unmodifiableMap(loadPluginLayers(seenBundles, qualifiedExports)); + } + + public List moduleDescriptors() { + return moduleDescriptors; + } + + public List pluginDescriptors() { + return pluginDescriptors; + } + + public Stream pluginLayers() { + return loadedPluginLayers.values().stream().map(Function.identity()); + } + + private Map loadPluginLayers( + Set bundles, + Map> qualifiedExports + ) { + Map loaded = new LinkedHashMap<>(); + Map> transitiveUrls = new HashMap<>(); + List sortedBundles = PluginsUtils.sortBundles(bundles); + if (sortedBundles.isEmpty() == false) { + Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); + for (PluginBundle bundle : sortedBundles) { + PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); + loadPluginLayer(bundle, loaded, qualifiedExports); + } + } + + return loaded; + } + + private void loadPluginLayer( + PluginBundle bundle, + Map loaded, + Map> qualifiedExports + ) { + String name = bundle.plugin.getName(); + logger.debug(() -> "Loading bundle: " + name); + + PluginsUtils.verifyCompatibility(bundle.plugin); + + // collect the list of extended plugins + List extendedPlugins = new ArrayList<>(); + for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { + 
LoadedPluginLayer extendedPlugin = loaded.get(extendedPluginName); + assert extendedPlugin != null; + assert extendedPlugin.spiClassLoader() != null : "All non-classpath plugins should be loaded with a classloader"; + extendedPlugins.add(extendedPlugin); + } + + final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( + getClass().getClassLoader(), + extendedPlugins.stream().map(LoadedPluginLayer::spiClassLoader).toList() + ); + LayerAndLoader spiLayerAndLoader = null; + if (bundle.hasSPI()) { + spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); + } + + final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); + final LayerAndLoader pluginLayerAndLoader = createPlugin( + bundle, + pluginParentLoader, + extendedPlugins, + spiLayerAndLoader, + qualifiedExports + ); + final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); + + if (spiLayerAndLoader == null) { + // use full implementation for plugins extending this one + spiLayerAndLoader = pluginLayerAndLoader; + } + + loaded.put(name, new LoadedPluginLayer(bundle, pluginClassLoader, spiLayerAndLoader.loader, spiLayerAndLoader.layer)); + } + + static LayerAndLoader createSPI( + PluginBundle bundle, + ClassLoader parentLoader, + List extendedPlugins, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); + return createSpiModuleLayer( + bundle.spiUrls, + parentLoader, + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer).toList(), + qualifiedExports + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); + } + } + + static LayerAndLoader createPlugin( + PluginBundle bundle, + ClassLoader pluginParentLoader, + List extendedPlugins, + LayerAndLoader spiLayerAndLoader, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); + var parentLayers = Stream.concat( + Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer) + ).toList(); + return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); + } else if (plugin.isStable()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); + return LayerAndLoader.ofLoader( + UberModuleClassLoader.getInstance( + pluginParentLoader, + ModuleLayer.boot(), + "synthetic." 
+ toModuleName(plugin.getName()), + bundle.allUrls, + Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules + ) + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); + } + } + + static LayerAndLoader createSpiModuleLayer( + Set urls, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + // assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + null, // no entry point + spiModuleName(urls), + urlsToPaths(urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createPluginModuleLayer( + PluginBundle bundle, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + bundle.plugin.getClassname(), + bundle.plugin.getModuleName().get(), + urlsToPaths(bundle.urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createModuleLayer( + String className, + String moduleName, + Path[] paths, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); + var finder = ModuleFinder.of(paths); + + var configuration = Configuration.resolveAndBind( + ModuleFinder.of(), + parentConfigurationOrBoot(parentLayers), + finder, + Set.of(moduleName) + ); + var controller = privilegedDefineModulesWithOneLoader(configuration, parentLayersOrBoot(parentLayers), parentLoader); + var pluginModule = controller.layer().findModule(moduleName).get(); + ensureEntryPointAccessible(controller, pluginModule, className); + // export/open upstream modules to this plugin module + exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); + // configure qualified exports/opens to other modules/plugins + addPluginExportsServices(qualifiedExports, controller); + logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); + return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); + } + + /** Determines the module name of the SPI module, given its URL. 
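
Note: spiModuleName leans on the JDK's ModuleFinder to read the module name out of the SPI jar. Isolated from the plugin machinery, the pattern is a sketch like this; the jar path is a placeholder:

    import java.lang.module.ModuleFinder;
    import java.lang.module.ModuleReference;
    import java.nio.file.Path;
    import java.util.Set;

    class SpiModuleNameSketch {
        // Resolve the modules on the given path and read the (single) module's name.
        static String moduleNameOf(Path spiJar) {
            Set<ModuleReference> refs = ModuleFinder.of(spiJar).findAll();
            return refs.iterator().next().descriptor().name();
        }
    }
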
*/ + static String spiModuleName(Set spiURLS) { + ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); + var mrefs = finder.findAll(); + assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; + return mrefs.stream().findFirst().get().descriptor().name(); + } + + // package-visible for testing + static String toModuleName(String name) { + String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots + .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start + .replaceAll("\\.$", "") // trim trailing dot + .toLowerCase(Locale.getDefault()); + assert ModuleSupport.isPackageName(result); + return result; + } + + static final String toPackageName(String className) { + assert className.endsWith(".") == false; + int index = className.lastIndexOf('.'); + if (index == -1) { + throw new IllegalStateException("invalid class name:" + className); + } + return className.substring(0, index); + } + + @SuppressForbidden(reason = "I need to convert URL's to Paths") + static final Path[] urlsToPaths(Set urls) { + return urls.stream().map(PluginsLoader::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); + } + + static final URI uncheckedToURI(URL url) { + try { + return url.toURI(); + } catch (URISyntaxException e) { + throw new AssertionError(new IOException(e)); + } + } + + private static List parentConfigurationOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot().configuration()); + } else { + return parentLayers.stream().map(ModuleLayer::configuration).toList(); + } + } + + /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ + private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { + if (className != null) { + controller.addOpens(pluginModule, toPackageName(className), serverModule); + } + } + + @SuppressWarnings("removal") + static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { + return AccessController.doPrivileged( + (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) + ); + } + + @SuppressWarnings("removal") + static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { + return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); + } + + private static List parentLayersOrBoot(List parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot()); + } else { + return parentLayers; + } + } + + protected void addServerExportsService(Map> qualifiedExports) { + var exportsService = new ModuleQualifiedExportsService(serverModule) { + @Override + protected void addExports(String pkg, Module target) { + serverModule.addExports(pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + serverModule.addOpens(pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, serverModule.getName()); + } + + private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { + for (Module module : controller.layer().modules()) { + var exportsService = new ModuleQualifiedExportsService(module) { + @Override + protected void addExports(String pkg, Module target) { + controller.addExports(module, pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + 
controller.addOpens(module, pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, module.getName()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index d5dd6d62d615e..cfdb7aaf0b509 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -23,34 +23,22 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; -import org.elasticsearch.jdk.JarHell; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.plugins.PluginsLoader.PluginLayer; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.plugins.spi.SPIClassIterator; import java.io.IOException; -import java.lang.ModuleLayer.Controller; -import java.lang.module.Configuration; -import java.lang.module.ModuleFinder; import java.lang.reflect.Constructor; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -63,10 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; - public class PluginsService implements ReportingService { public StablePluginsRegistry getStablePluginRegistry() { @@ -77,33 +61,18 @@ public StablePluginsRegistry getStablePluginRegistry() { * A loaded plugin is one for which Elasticsearch has successfully constructed an instance of the plugin's class * @param descriptor Metadata about the plugin, usually loaded from plugin properties * @param instance The constructed instance of the plugin's main class - * @param loader The classloader for the plugin - * @param layer The module layer for the plugin */ - record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader loader, ModuleLayer layer) { + record LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { LoadedPlugin { Objects.requireNonNull(descriptor); Objects.requireNonNull(instance); - Objects.requireNonNull(loader); - Objects.requireNonNull(layer); - } - - /** - * Creates a loaded classpath plugin. A classpath plugin is a plugin loaded - * by the system classloader and defined to the unnamed module of the boot layer. 
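
Note: for stable plugins the synthetic module name comes from toModuleName, defined above in PluginsLoader. A runnable sketch of the same transformation with one worked example; the expected output is inferred from the regexes, not taken from the source:

    import java.util.Locale;

    class ToModuleNameSketch {
        // Squash runs of non-word characters to dots, strip a non-alphabetic
        // prefix, drop any trailing dot, then lowercase.
        static String toModuleName(String name) {
            return name.replaceAll("\\W+", ".")
                .replaceAll("(^[^A-Za-z_]*)", "")
                .replaceAll("\\.$", "")
                .toLowerCase(Locale.getDefault());
        }

        public static void main(String[] args) {
            // Prints "analysis.icu"; the loader then prefixes "synthetic."
            System.out.println(toModuleName("analysis-icu"));
        }
    }
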
- */ - LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { - this(descriptor, instance, PluginsService.class.getClassLoader(), ModuleLayer.boot()); } } private static final Logger logger = LogManager.getLogger(PluginsService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(PluginsService.class); - private final Settings settings; - private final Path configPath; - /** * We keep around a list of plugins and modules. The order of * this list is that which the plugins and modules were loaded in. @@ -117,69 +86,32 @@ record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader lo /** * Constructs a new PluginService * - * @param settings The settings of the system - * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem - * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + * @param settings The settings for this node + * @param configPath The configuration path for this node + * @param pluginsLoader the information required to complete loading of plugins */ - @SuppressWarnings("this-escape") - public PluginsService(Settings settings, Path configPath, Path modulesDirectory, Path pluginsDirectory) { - this.settings = settings; - this.configPath = configPath; - - Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); - addServerExportsService(qualifiedExports); - - Set seenBundles = new LinkedHashSet<>(); - - // load modules - List modulesList = new ArrayList<>(); - Set moduleNameList = new HashSet<>(); - if (modulesDirectory != null) { - try { - Set modules = PluginsUtils.getModuleBundles(modulesDirectory); - modules.stream().map(PluginBundle::pluginDescriptor).forEach(m -> { - modulesList.add(m); - moduleNameList.add(m.getName()); - }); - seenBundles.addAll(modules); - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize modules", ex); - } - } + public PluginsService(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = loadPluginBundles(settings, configPath, pluginsLoader); - // load plugins - List pluginsList = new ArrayList<>(); - if (pluginsDirectory != null) { - try { - // TODO: remove this leniency, but tests bogusly rely on it - if (isAccessibleDirectory(pluginsDirectory, logger)) { - PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); - Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); - plugins.stream().map(PluginBundle::pluginDescriptor).forEach(pluginsList::add); - seenBundles.addAll(plugins); - } - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize plugins", ex); - } - } - - LinkedHashMap loadedPlugins = loadBundles(seenBundles, qualifiedExports); + var modulesDescriptors = pluginsLoader.moduleDescriptors(); + var pluginDescriptors = pluginsLoader.pluginDescriptors(); var inspector = PluginIntrospector.getInstance(); - this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginsList, loadedPlugins), modulesList); + this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginDescriptors, loadedPlugins), modulesDescriptors); this.plugins = List.copyOf(loadedPlugins.values()); - checkDeprecations(inspector, pluginsList, loadedPlugins); + checkDeprecations(inspector, pluginDescriptors, loadedPlugins); checkMandatoryPlugins( - pluginsList.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), + 
pluginDescriptors.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), new HashSet<>(MANDATORY_SETTING.get(settings)) ); // we don't log jars in lib/ we really shouldn't log modules, // but for now: just be transparent so we can debug any potential issues + Set moduleNames = new HashSet<>(modulesDescriptors.stream().map(PluginDescriptor::getName).toList()); for (String name : loadedPlugins.keySet()) { - if (moduleNameList.contains(name)) { + if (moduleNames.contains(name)) { logger.info("loaded module [{}]", name); } else { logger.info("loaded plugin [{}]", name); @@ -282,23 +214,11 @@ protected List plugins() { return this.plugins; } - private LinkedHashMap loadBundles( - Set bundles, - Map> qualifiedExports - ) { - LinkedHashMap loaded = new LinkedHashMap<>(); - Map> transitiveUrls = new HashMap<>(); - List sortedBundles = PluginsUtils.sortBundles(bundles); - if (sortedBundles.isEmpty() == false) { - Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); - for (PluginBundle bundle : sortedBundles) { - PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadBundle(bundle, loaded, qualifiedExports); - } - } - - loadExtensions(loaded.values()); - return loaded; + private Map loadPluginBundles(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = new LinkedHashMap<>(); + pluginsLoader.pluginLayers().forEach(pl -> loadBundle(pl, loadedPlugins, settings, configPath)); + loadExtensions(loadedPlugins.values()); + return loadedPlugins; } // package-private for test visibility @@ -443,68 +363,43 @@ private static String extensionConstructorMessage(Class extensi return "constructor for extension [" + extensionClass.getName() + "] of type [" + extensionPointType.getName() + "]"; } - private void loadBundle( - PluginBundle bundle, - Map loaded, - Map> qualifiedExports - ) { - String name = bundle.plugin.getName(); - logger.debug(() -> "Loading bundle: " + name); - - PluginsUtils.verifyCompatibility(bundle.plugin); + private void loadBundle(PluginLayer pluginLayer, Map loadedPlugins, Settings settings, Path configPath) { + String name = pluginLayer.pluginBundle().plugin.getName(); + logger.debug(() -> "Loading plugin bundle: " + name); - // collect the list of extended plugins + // validate the list of extended plugins List extendedPlugins = new ArrayList<>(); - for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { - LoadedPlugin extendedPlugin = loaded.get(extendedPluginName); + for (String extendedPluginName : pluginLayer.pluginBundle().plugin.getExtendedPlugins()) { + LoadedPlugin extendedPlugin = loadedPlugins.get(extendedPluginName); assert extendedPlugin != null; if (ExtensiblePlugin.class.isInstance(extendedPlugin.instance()) == false) { throw new IllegalStateException("Plugin [" + name + "] cannot extend non-extensible plugin [" + extendedPluginName + "]"); } - assert extendedPlugin.loader() != null : "All non-classpath plugins should be loaded with a classloader"; extendedPlugins.add(extendedPlugin); logger.debug( - () -> "Loading bundle: " + name + ", ext plugins: " + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() + () -> "Loading plugin bundle: " + + name + + ", ext plugins: " + + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() ); } - final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( - getClass().getClassLoader(), - extendedPlugins.stream().map(LoadedPlugin::loader).toList() - ); - LayerAndLoader spiLayerAndLoader = null; - if 
(bundle.hasSPI()) { - spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); - } - - final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); - final LayerAndLoader pluginLayerAndLoader = createPlugin( - bundle, - pluginParentLoader, - extendedPlugins, - spiLayerAndLoader, - qualifiedExports - ); - final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); - - if (spiLayerAndLoader == null) { - // use full implementation for plugins extending this one - spiLayerAndLoader = pluginLayerAndLoader; - } + PluginBundle pluginBundle = pluginLayer.pluginBundle(); + ClassLoader pluginClassLoader = pluginLayer.pluginClassLoader(); // reload SPI with any new services from the plugin - reloadLuceneSPI(pluginClassLoader); + reloadLuceneSPI(pluginLayer.pluginClassLoader()); ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { // Set context class loader to plugin's class loader so that plugins // that have dependencies with their own SPI endpoints have a chance to load // and initialize them appropriately. - privilegedSetContextClassLoader(pluginClassLoader); + privilegedSetContextClassLoader(pluginLayer.pluginClassLoader()); Plugin plugin; - if (bundle.pluginDescriptor().isStable()) { - stablePluginsRegistry.scanBundleForStablePlugins(bundle, pluginClassLoader); + if (pluginBundle.pluginDescriptor().isStable()) { + stablePluginsRegistry.scanBundleForStablePlugins(pluginBundle, pluginClassLoader); /* Contrary to old plugins we don't need an instance of the plugin here. Stable plugin register components (like CharFilterFactory) in stable plugin registry, which is then used in AnalysisModule @@ -514,16 +409,16 @@ Stable plugin register components (like CharFilterFactory) in stable plugin regi We need to pass a name though so that we can show that a plugin was loaded (via cluster state api) This might need to be revisited once support for settings is added */ - plugin = new StablePluginPlaceHolder(bundle.plugin.getName()); + plugin = new StablePluginPlaceHolder(pluginBundle.plugin.getName()); } else { - Class pluginClass = loadPluginClass(bundle.plugin.getClassname(), pluginClassLoader); + Class pluginClass = loadPluginClass(pluginBundle.plugin.getClassname(), pluginClassLoader); if (pluginClassLoader != pluginClass.getClassLoader()) { throw new IllegalStateException( "Plugin [" + name + "] must reference a class loader local Plugin class [" - + bundle.plugin.getClassname() + + pluginBundle.plugin.getClassname() + "] (class loader [" + pluginClass.getClassLoader() + "])" @@ -531,75 +426,12 @@ We need to pass a name though so that we can show that a plugin was loaded (via } plugin = loadPlugin(pluginClass, settings, configPath); } - loaded.put(name, new LoadedPlugin(bundle.plugin, plugin, spiLayerAndLoader.loader(), spiLayerAndLoader.layer())); + loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin)); } finally { privilegedSetContextClassLoader(cl); } } - static LayerAndLoader createSPI( - PluginBundle bundle, - ClassLoader parentLoader, - List extendedPlugins, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); - return createSpiModuleLayer( - bundle.spiUrls, - parentLoader, - extendedPlugins.stream().map(LoadedPlugin::layer).toList(), - qualifiedExports - ); - } else { - logger.debug(() -> "Loading bundle: " + 
plugin.getName() + ", creating spi, non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); - } - } - - static LayerAndLoader createPlugin( - PluginBundle bundle, - ClassLoader pluginParentLoader, - List extendedPlugins, - LayerAndLoader spiLayerAndLoader, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); - var parentLayers = Stream.concat( - Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), - extendedPlugins.stream().map(LoadedPlugin::layer) - ).toList(); - return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); - } else if (plugin.isStable()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); - return LayerAndLoader.ofLoader( - UberModuleClassLoader.getInstance( - pluginParentLoader, - ModuleLayer.boot(), - "synthetic." + toModuleName(plugin.getName()), - bundle.allUrls, - Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules - ) - ); - } else { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); - } - } - - // package-visible for testing - static String toModuleName(String name) { - String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots - .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start - .replaceAll("\\.$", "") // trim trailing dot - .toLowerCase(Locale.getDefault()); - assert ModuleSupport.isPackageName(result); - return result; - } - private static void checkDeprecations( PluginIntrospector inspector, List pluginDescriptors, @@ -706,173 +538,6 @@ public final Stream filterPlugins(Class type) { return plugins().stream().filter(x -> type.isAssignableFrom(x.instance().getClass())).map(p -> ((T) p.instance())); } - static LayerAndLoader createPluginModuleLayer( - PluginBundle bundle, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - bundle.plugin.getClassname(), - bundle.plugin.getModuleName().get(), - urlsToPaths(bundle.urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - static final LayerAndLoader createSpiModuleLayer( - Set urls, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - // assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - null, // no entry point - spiModuleName(urls), - urlsToPaths(urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - private static final Module serverModule = PluginsService.class.getModule(); - - static LayerAndLoader createModuleLayer( - String className, - String moduleName, - Path[] paths, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); - var finder = ModuleFinder.of(paths); - - var configuration = Configuration.resolveAndBind( - ModuleFinder.of(), - parentConfigurationOrBoot(parentLayers), - finder, - Set.of(moduleName) - ); - var controller = privilegedDefineModulesWithOneLoader(configuration, 
parentLayersOrBoot(parentLayers), parentLoader); - var pluginModule = controller.layer().findModule(moduleName).get(); - ensureEntryPointAccessible(controller, pluginModule, className); - // export/open upstream modules to this plugin module - exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); - // configure qualified exports/opens to other modules/plugins - addPluginExportsServices(qualifiedExports, controller); - logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); - return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); - } - - private static List parentLayersOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot()); - } else { - return parentLayers; - } - } - - private static List parentConfigurationOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot().configuration()); - } else { - return parentLayers.stream().map(ModuleLayer::configuration).toList(); - } - } - - /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ - private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { - if (className != null) { - controller.addOpens(pluginModule, toPackageName(className), serverModule); - } - } - - protected void addServerExportsService(Map> qualifiedExports) { - final Module serverModule = PluginsService.class.getModule(); - var exportsService = new ModuleQualifiedExportsService(serverModule) { - @Override - protected void addExports(String pkg, Module target) { - serverModule.addExports(pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - serverModule.addOpens(pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, serverModule.getName()); - } - - private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { - for (Module module : controller.layer().modules()) { - var exportsService = new ModuleQualifiedExportsService(module) { - @Override - protected void addExports(String pkg, Module target) { - controller.addExports(module, pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - controller.addOpens(module, pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, module.getName()); - } - } - - /** Determines the module name of the SPI module, given its URL. */ - static String spiModuleName(Set spiURLS) { - ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); - var mrefs = finder.findAll(); - assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; - return mrefs.stream().findFirst().get().descriptor().name(); - } - - /** - * Tuple of module layer and loader. - * Modular Plugins have a plugin specific loader and layer. - * Non-Modular plugins have a plugin specific loader and the boot layer. 
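
Note: stripped of the privileged blocks and qualified-exports wiring, createModuleLayer is the standard JDK resolve-and-define sequence. A minimal sketch using only the JDK calls that appear in this diff:

    import java.lang.module.Configuration;
    import java.lang.module.ModuleFinder;
    import java.nio.file.Path;
    import java.util.List;
    import java.util.Set;

    class ModuleLayerSketch {
        // Resolve moduleName from the plugin jars against the boot layer, then
        // define all resolved modules in one new layer with a single class loader.
        static ModuleLayer.Controller defineLayer(String moduleName, Path[] jars, ClassLoader parent) {
            Configuration cf = Configuration.resolveAndBind(
                ModuleFinder.of(),                          // empty "before" finder
                List.of(ModuleLayer.boot().configuration()),
                ModuleFinder.of(jars),                      // "after" finder: the plugin jars
                Set.of(moduleName)
            );
            return ModuleLayer.defineModulesWithOneLoader(cf, List.of(ModuleLayer.boot()), parent);
        }
    }
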
- */ - record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { - - LayerAndLoader { - Objects.requireNonNull(layer); - Objects.requireNonNull(loader); - } - - static LayerAndLoader ofLoader(ClassLoader loader) { - return new LayerAndLoader(ModuleLayer.boot(), loader); - } - } - - @SuppressForbidden(reason = "I need to convert URL's to Paths") - static final Path[] urlsToPaths(Set urls) { - return urls.stream().map(PluginsService::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); - } - - static final URI uncheckedToURI(URL url) { - try { - return url.toURI(); - } catch (URISyntaxException e) { - throw new AssertionError(new IOException(e)); - } - } - - static final String toPackageName(String className) { - assert className.endsWith(".") == false; - int index = className.lastIndexOf('.'); - if (index == -1) { - throw new IllegalStateException("invalid class name:" + className); - } - return className.substring(0, index); - } - @SuppressWarnings("removal") private static void privilegedSetContextClassLoader(ClassLoader loader) { AccessController.doPrivileged((PrivilegedAction) () -> { @@ -880,16 +545,4 @@ private static void privilegedSetContextClassLoader(ClassLoader loader) { return null; }); } - - @SuppressWarnings("removal") - static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) - ); - } - - @SuppressWarnings("removal") - static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { - return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); - } } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java index 44fb531f8610e..155cff57a0ebf 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java @@ -210,12 +210,12 @@ public static void checkForFailedPluginRemovals(final Path pluginsDirectory) thr } /** Get bundles for plugins installed in the given modules directory. */ - static Set getModuleBundles(Path modulesDirectory) throws IOException { + public static Set getModuleBundles(Path modulesDirectory) throws IOException { return findBundles(modulesDirectory, "module"); } /** Get bundles for plugins installed in the given plugins directory. 
*/ - static Set getPluginBundles(final Path pluginsDirectory) throws IOException { + public static Set getPluginBundles(final Path pluginsDirectory) throws IOException { return findBundles(pluginsDirectory, "plugin"); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java index 29cae343fb09e..d043974055667 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -146,12 +147,12 @@ public RestResponse(RestChannel channel, RestStatus status, Exception e) throws params = new ToXContent.DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_STACK_TRACE, "false"), params); } - if (channel.detailedErrorsEnabled() == false) { + if (channel.request().getRestApiVersion() == RestApiVersion.V_8 && channel.detailedErrorsEnabled() == false) { deprecationLogger.warn( DeprecationCategory.API, "http_detailed_errors", - "The JSON format of non-detailed errors will change in Elasticsearch 9.0 to match the JSON structure" - + " used for detailed errors. To keep using the existing format, use the V8 REST API." + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure" + + " used for detailed errors." ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 7b57481ad5716..e5c4826bfce97 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -40,6 +40,10 @@ private SearchCapabilities() {} private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support"; /** Support multi-dense-vector script field access. */ private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access"; + /** Initial support for multi-dense-vector maxSim functions access. 
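+ * Advertised only when the multi-dense-vector feature flag is enabled (see the static block below), so clients can feature-detect maxSim scoring via the capabilities API.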
*/ + private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim"; + + private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; public static final Set CAPABILITIES; static { @@ -50,9 +54,11 @@ private SearchCapabilities() {} capabilities.add(DENSE_VECTOR_DOCVALUE_FIELDS); capabilities.add(TRANSFORM_RANK_RRF_TO_RETRIEVER); capabilities.add(NESTED_RETRIEVER_INNER_HITS_SUPPORT); + capabilities.add(RANDOM_SAMPLER_WITH_SCORED_SUBAGGS); if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) { capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER); capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS); + capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM); } if (Build.current().isSnapshot()) { capabilities.add(KQL_QUERY_SUPPORTED); diff --git a/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java new file mode 100644 index 0000000000000..136c5e7b57d4b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java @@ -0,0 +1,372 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.script.field.vectors.DenseVector; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; + +import java.io.IOException; +import java.util.HexFormat; +import java.util.List; + +public class MultiVectorScoreScriptUtils { + + public static class MultiDenseVectorFunction { + protected final ScoreScript scoreScript; + protected final MultiDenseVectorDocValuesField field; + + public MultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field) { + this.scoreScript = scoreScript; + this.field = field; + } + + void setNextVector() { + try { + field.setNextDocId(scoreScript._getDocId()); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } + if (field.isEmpty()) { + throw new IllegalArgumentException("A document doesn't have a value for a multi-vector field!"); + } + } + } + + public static class ByteMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final byte[][] queryVector; + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. 
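+ * Must be non-empty, and every inner vector must have the same number of dimensions; violations are rejected with an IllegalArgumentException.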
+ */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + field.getElementType().checkDimensions(field.get().getDims(), queryVector.get(0).size()); + this.queryVector = new byte[queryVector.size()][queryVector.get(0).size()]; + float[] validateValues = new float[queryVector.size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." + ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + final Number number = queryVector.get(i).get(j); + byte value = number.byteValue(); + this.queryVector[i][j] = value; + validateValues[i] = number.floatValue(); + } + field.getElementType().checkVectorBounds(validateValues); + } + } + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + this.queryVector = queryVector; + } + } + + public static class FloatMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final float[][] queryVector; + + /** + * Constructs a dense vector function used for float vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public FloatMultiDenseVectorFunction( + ScoreScript scoreScript, + MultiDenseVectorDocValuesField field, + List> queryVector + ) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + DenseVector.checkDimensions(field.get().getDims(), queryVector.get(0).size()); + + this.queryVector = new float[queryVector.size()][queryVector.get(0).size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + this.queryVector[i][j] = queryVector.get(i).get(j).floatValue(); + } + field.getElementType().checkVectorBounds(this.queryVector[i]); + } + } + } + + // Calculate max-sim inverse Hamming similarity between a query's dense vectors and documents' dense vectors + public interface MaxSimInvHammingDistanceInterface { + float maxSimInvHamming(); + } + + public static class ByteMaxSimInvHammingDistance extends ByteMultiDenseVectorFunction implements MaxSimInvHammingDistanceInterface { + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public float maxSimInvHamming() { + setNextVector(); + return field.get().maxSimInvHamming(queryVector); + } + } + + private record BytesOrList(byte[][] bytes, List> list) {} + + @SuppressWarnings("unchecked") + private static BytesOrList parseBytes(Object queryVector) { + if (queryVector instanceof List) { + // check if it's a list of strings or a list of lists + if (((List) queryVector).get(0) instanceof List) { + return new BytesOrList(null, ((List>) queryVector)); + } else if (((List) queryVector).get(0) instanceof String) { + byte[][] parsedQueryVector = new byte[((List) queryVector).size()][]; + int lastSize = -1; + for (int i = 0; i < ((List) queryVector).size(); i++) { + parsedQueryVector[i] = HexFormat.of().parseHex((String) ((List) queryVector).get(i)); + if (lastSize != -1 && lastSize != parsedQueryVector[i].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions."
+ ); + } + lastSize = parsedQueryVector[i].length; + } + return new BytesOrList(parsedQueryVector, null); + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } + + public static final class MaxSimInvHamming { + + private final MaxSimInvHammingDistanceInterface function; + + public MaxSimInvHamming(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + if (field.getElementType() == DenseVectorFieldMapper.ElementType.FLOAT) { + throw new IllegalArgumentException("hamming distance is only supported for byte or bit vectors"); + } + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.bytes); + } else { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.list); + } + } + + public double maxSimInvHamming() { + return function.maxSimInvHamming(); + } + } + + // Calculate a dot product between a query's dense vector and documents' dense vectors + public interface MaxSimDotProductInterface { + double maxSimDotProduct(); + } + + public static class MaxSimBitDotProduct extends MultiDenseVectorFunction implements MaxSimDotProductInterface { + private final byte[][] byteQueryVector; + private final float[][] floatQueryVector; + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("Cannot calculate bit dot product for non-bit vectors"); + } + int fieldDims = field.get().getDims(); + if (fieldDims != queryVector.length * Byte.SIZE && fieldDims != queryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + queryVector.length + + "]." + ); + } + this.byteQueryVector = queryVector; + this.floatQueryVector = null; + } + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("cannot calculate bit dot product for non-bit vectors"); + } + float[][] floatQueryVector = new float[queryVector.size()][]; + byte[][] byteQueryVector = new byte[queryVector.size()][]; + boolean isFloat = false; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + floatQueryVector[i] = new float[queryVector.get(i).size()]; + if (isFloat == false) { + byteQueryVector[i] = new byte[queryVector.get(i).size()]; + } + for (int j = 0; j < queryVector.get(i).size(); j++) { + Number number = queryVector.get(i).get(j); + floatQueryVector[i][j] = number.floatValue(); + if (isFloat == false) { + byteQueryVector[i][j] = number.byteValue(); + } + if (isFloat + || floatQueryVector[i][j] % 1.0f != 0.0f + || floatQueryVector[i][j] < Byte.MIN_VALUE + || floatQueryVector[i][j] > Byte.MAX_VALUE) { + isFloat = true; + } + } + } + int fieldDims = field.get().getDims(); + if (isFloat) { + this.floatQueryVector = floatQueryVector; + this.byteQueryVector = null; + if (fieldDims != floatQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims + + "] for float wise operations: provided [" + + floatQueryVector[0].length + + "]." + ); + } + } else { + this.floatQueryVector = null; + this.byteQueryVector = byteQueryVector; + if (fieldDims != byteQueryVector[0].length * Byte.SIZE && fieldDims != byteQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + byteQueryVector[0].length + + "]." + ); + } + } + } + + @Override + public double maxSimDotProduct() { + setNextVector(); + return byteQueryVector != null ? field.get().maxSimDotProduct(byteQueryVector) : field.get().maxSimDotProduct(floatQueryVector); + } + } + + public static class MaxSimByteDotProduct extends ByteMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static class MaxSimFloatDotProduct extends FloatMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimFloatDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static final class MaxSimDotProduct { + + private final MaxSimDotProductInterface function; + + @SuppressWarnings("unchecked") + public MaxSimDotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + function = switch (field.getElementType()) { + case BIT -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.list); + } + } + case BYTE -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimByteDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new 
MaxSimByteDotProduct(scoreScript, field, bytesOrList.list); + } + } + case FLOAT -> { + if (queryVector instanceof List) { + yield new MaxSimFloatDotProduct(scoreScript, field, (List>) queryVector); + } + throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName()); + } + }; + } + + public double maxSimDotProduct() { + return function.maxSimDotProduct(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java index 24e19a803ff38..7805816090d51 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java @@ -10,11 +10,13 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.simdvec.ESVectorUtil; -import java.util.Iterator; +import java.util.Arrays; public class BitMultiDenseVector extends ByteMultiDenseVector { - public BitMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public BitMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { super(vectorValues, magnitudesBytes, numVecs, dims); } @@ -31,6 +33,70 @@ public void checkDimensions(int qvDims) { } } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipFloatBit(query[i], vv)); + } + } + float sums = 0; + for (float m : maxes) { + sums += m; + } + return sums; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + if (query[0].length == dims) { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.andBitCount(query[i], vv)); + } + } + } else { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipByteBit(query[i], vv)); + } + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = this.getDims(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + @Override public int getDims() { return dims * Byte.SIZE; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java index e610d10146b2f..5e9d3e05746c8 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java +++ 
b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java @@ -10,21 +10,22 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder; +import java.util.Arrays; import java.util.Iterator; public class ByteMultiDenseVector implements MultiDenseVector { - protected final Iterator vectorValues; + protected final VectorIterator vectorValues; protected final int numVecs; protected final int dims; - private Iterator floatDocVectors; private float[] magnitudes; private final BytesRef magnitudesBytes; - public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public ByteMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { assert magnitudesBytes.length == numVecs * Float.BYTES; this.vectorValues = vectorValues; this.numVecs = numVecs; @@ -33,11 +34,50 @@ public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBy } @Override - public Iterator getVectors() { - if (floatDocVectors == null) { - floatDocVectors = new ByteToFloatIteratorWrapper(vectorValues, dims); + public float maxSimDotProduct(float[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = dims * Byte.SIZE; + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; } - return floatDocVectors; + return sum; + } + + @Override + public Iterator getVectors() { + return new ByteToFloatIteratorWrapper(vectorValues.copy(), dims); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java index d1e062e0a3dee..d45c5b85137f5 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java @@ -23,7 +23,7 @@ public class ByteMultiDenseVectorDocValuesField extends MultiDenseVectorDocValue private final BinaryDocValues magnitudes; protected final int dims; protected int numVecs; - protected Iterator vectorValue; + protected VectorIterator vectorValue; protected boolean decoded; protected BytesRef value; protected BytesRef magnitudesValue; @@ -111,7 +111,7 @@ public boolean isEmpty() { return value == null; } - static class ByteVectorIterator implements Iterator { + static class ByteVectorIterator implements VectorIterator { private final byte[] buffer; 
private final BytesRef vectorValues; private final int size; @@ -138,5 +138,15 @@ public byte[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new ByteVectorIterator(vectorValues, new byte[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java index 9ffe8b3b970c4..9c2f7eb6a86d4 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java @@ -10,7 +10,9 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import java.util.Arrays; import java.util.Iterator; import static org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder.getMultiMagnitudes; @@ -21,19 +23,47 @@ public class FloatMultiDenseVector implements MultiDenseVector { private float[] magnitudesArray = null; private final int dims; private final int numVectors; - private final Iterator decodedDocVector; + private final VectorIterator vectorValues; - public FloatMultiDenseVector(Iterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { + public FloatMultiDenseVector(VectorIterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { assert magnitudes.length == numVectors * Float.BYTES; - this.decodedDocVector = decodedDocVector; + this.vectorValues = decodedDocVector; this.magnitudes = magnitudes; this.numVectors = numVectors; this.dims = dims; } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + float[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new UnsupportedOperationException("hamming distance is not supported for float vectors"); + } + @Override public Iterator getVectors() { - return decodedDocVector; + return vectorValues.copy(); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java index 356db58d989c5..c7ac7842afd96 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java @@ -110,14 +110,16 @@ private void decodeVectorIfNecessary() { } } - static class FloatVectorIterator implements Iterator { + static class FloatVectorIterator implements VectorIterator { private final float[] buffer; private final FloatBuffer vectorValues; + private final BytesRef vectorValueBytesRef; private final int size; private int idx = 0; FloatVectorIterator(BytesRef vectorValues, float[] buffer, int size) { assert vectorValues.length == 
(buffer.length * Float.BYTES * size); + this.vectorValueBytesRef = vectorValues; this.vectorValues = ByteBuffer.wrap(vectorValues.bytes, vectorValues.offset, vectorValues.length) .order(ByteOrder.LITTLE_ENDIAN) .asFloatBuffer(); @@ -139,5 +141,16 @@ public float[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new FloatVectorIterator(vectorValueBytesRef, new float[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + vectorValues.rewind(); + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java index 85c851dbe545c..7d948cf5a74fa 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java @@ -17,6 +17,12 @@ default void checkDimensions(int qvDims) { checkDimensions(getDims(), qvDims); } + float maxSimDotProduct(float[][] query); + + float maxSimDotProduct(byte[][] query); + + float maxSimInvHamming(byte[][] query); + Iterator getVectors(); float[] getMagnitudes(); @@ -63,6 +69,21 @@ public int getDims() { throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); } + @Override + public float maxSimDotProduct(float[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + @Override public int size() { return 0; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java new file mode 100644 index 0000000000000..b8615ac877254 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.script.field.vectors; + +import java.util.Iterator; + +public interface VectorIterator extends Iterator { + Iterator copy(); + + void reset(); + + static VectorIterator from(float[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public float[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } + + static VectorIterator from(byte[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public byte[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java index b4d5512331b42..d08a76e51c6bd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AdaptingAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.search.profile.aggregation.InternalAggregationProfileTree; @@ -98,10 +99,10 @@ public final void postCollection() throws IOException { } @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { InternalAggregation[] delegateResults = delegate.buildAggregations(owningBucketOrds); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { result[ordIdx] = adapt(delegateResults[ordIdx]); } return result; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index 0d36469dddfdc..aa8d9fba554c1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.search.sort.SortOrder; @@ -142,7 +144,7 @@ public interface BucketComparator { * @return the results for each ordinal, in the same order as the array * of ordinals */ - public abstract InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException; + public abstract InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException; /** * Release 
this aggregation and its sub-aggregations. @@ -153,11 +155,11 @@ public interface BucketComparator { * Build the result of this aggregation if it is at the "top level" * of the aggregation tree. If, instead, it is a sub-aggregation of * another aggregation then the aggregation that contains it will call - * {@link #buildAggregations(long[])}. + * {@link #buildAggregations(LongArray)}. */ public final InternalAggregation buildTopLevel() throws IOException { assert parent() == null; - return buildAggregations(new long[] { 0 })[0]; + return buildAggregations(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true))[0]; } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index bf9116207b375..11444edca080d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -13,6 +13,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.CheckedIntFunction; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.Maps; @@ -40,7 +42,7 @@ public abstract class AggregatorBase extends Aggregator { protected final String name; protected final Aggregator parent; - private final AggregationContext context; + protected final AggregationContext context; private final Map metadata; protected final Aggregator[] subAggregators; @@ -48,6 +50,8 @@ public abstract class AggregatorBase extends Aggregator { private Map subAggregatorbyName; private long requestBytesUsed; + private final CircuitBreaker breaker; + private int callCount; /** * Constructs a new Aggregator. @@ -72,6 +76,7 @@ protected AggregatorBase( this.metadata = metadata; this.parent = parent; this.context = context; + this.breaker = context.breaker(); assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(this, subAggregatorCardinality); context.addReleasable(this); @@ -327,6 +332,30 @@ protected final InternalAggregations buildEmptySubAggregations() { return InternalAggregations.from(aggs); } + /** + * Builds the aggregations array with the provided size and populates it using the provided function. + */ + protected final InternalAggregation[] buildAggregations(int size, CheckedIntFunction aggFunction) + throws IOException { + final InternalAggregation[] results = new InternalAggregation[size]; + for (int i = 0; i < results.length; i++) { + checkRealMemoryCB("internal_aggregation"); + results[i] = aggFunction.apply(i); + } + return results; + } + + /** + * This method calls the circuit breaker from time to time in order to give it a chance to check available + * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. 
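+ * (Hoisted from BucketsAggregator so that bucket allocation and per-ordinal result building share the same sampled check.)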
+ * To achieve that, we are passing 0 as the estimated bytes every 1024 calls + */ + protected final void checkRealMemoryCB(String label) { + if ((++callCount & 0x3FF) == 0) { + breaker.addEstimateBytesAndMaybeBreak(0, label); + } + } + @Override public String toString() { return name; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java index 8accc6b15d820..a32211fd4d8fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; @@ -39,11 +40,7 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildEmptyAggregation(); - } - return results; + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildEmptyAggregation()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 231130c920349..44d76d31be0e7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -20,6 +20,7 @@ import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -146,7 +147,7 @@ public void postCollection() throws IOException { * Replay the wrapped collector, but only on a selection of buckets. */ @Override - public void prepareSelectedBuckets(long... selectedBuckets) throws IOException { + public void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException { if (finished == false) { throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called"); } @@ -154,9 +155,9 @@ public void prepareSelectedBuckets(long... 
selectedBuckets) throws IOException { throw new IllegalStateException("Already been replayed"); } - this.selectedBuckets = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE); - for (long ord : selectedBuckets) { - this.selectedBuckets.add(ord); + this.selectedBuckets = new LongHash(selectedBuckets.size(), BigArrays.NON_RECYCLING_INSTANCE); + for (long i = 0; i < selectedBuckets.size(); i++) { + this.selectedBuckets.add(selectedBuckets.get(i)); } boolean needsScores = scoreMode().needsScores(); @@ -232,21 +233,22 @@ private static void failInCaseOfBadScorer(String message) { * been collected directly. */ @Override - public Aggregator wrap(final Aggregator in) { + public Aggregator wrap(final Aggregator in, BigArrays bigArrays) { return new WrappedAggregator(in) { @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { if (selectedBuckets == null) { throw new IllegalStateException("Collection has not been replayed yet."); } - long[] rebasedOrds = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - rebasedOrds[ordIdx] = selectedBuckets.find(owningBucketOrds[ordIdx]); - if (rebasedOrds[ordIdx] == -1) { - throw new IllegalStateException("Cannot build for a bucket which has not been collected"); + try (LongArray rebasedOrds = bigArrays.newLongArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + rebasedOrds.set(ordIdx, selectedBuckets.find(owningBucketOrds.get(ordIdx))); + if (rebasedOrds.get(ordIdx) == -1) { + throw new IllegalStateException("Cannot build for a bucket which has not been collected"); + } } + return in.buildAggregations(rebasedOrds); } - return in.buildAggregations(rebasedOrds); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index e6c26c4278807..ea667b821a7dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -9,8 +9,9 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.Aggregator; @@ -40,10 +41,9 @@ import java.util.function.ToLongFunction; public abstract class BucketsAggregator extends AggregatorBase { - private final CircuitBreaker breaker; + private LongArray docCounts; protected final DocCountProvider docCountProvider; - private int callCount; @SuppressWarnings("this-escape") public BucketsAggregator( @@ -55,7 +55,6 @@ public BucketsAggregator( Map metadata ) throws IOException { super(name, factories, aggCtx, parent, bucketCardinality, metadata); - breaker = aggCtx.breaker(); docCounts = bigArrays().newLongArray(1, true); docCountProvider = new DocCountProvider(); } @@ -81,7 +80,7 @@ public final void collectBucket(LeafBucketCollector subCollector, int doc, long grow(bucketOrd + 1); int docCount = 
docCountProvider.getDocCount(doc); if (docCounts.increment(bucketOrd, docCount) == docCount) { - updateCircuitBreaker("allocated_buckets"); + checkRealMemoryCB("allocated_buckets"); } subCollector.collect(doc, bucketOrd); } @@ -155,26 +154,26 @@ public final long bucketDocCount(long bucketOrd) { /** * Hook to allow taking an action before building the sub agg results. */ - protected void prepareSubAggs(long[] ordsToCollect) throws IOException {} + protected void prepareSubAggs(LongArray ordsToCollect) throws IOException {} /** * Build the results of the sub-aggregations of the buckets at each of * the provided ordinals. *

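* Ordinals are passed as a {@link LongArray} allocated through BigArrays, so large ordinal sets are accounted to the circuit breaker rather than held in an untracked long[].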
* Most aggregations should probably use something like - * {@link #buildSubAggsForAllBuckets(Object[][], ToLongFunction, BiConsumer)} - * or {@link #buildAggregationsForVariableBuckets(long[], LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)} - * or {@link #buildAggregationsForFixedBucketCount(long[], int, BucketBuilderForFixedCount, Function)} - * or {@link #buildAggregationsForSingleBucket(long[], SingleBucketResultBuilder)} + * {@link #buildSubAggsForAllBuckets(ObjectArray, ToLongFunction, BiConsumer)} + * or {@link #buildAggregationsForVariableBuckets(LongArray, LongKeyedBucketOrds, BucketBuilderForVariable, ResultBuilderForVariable)} + * or {@link #buildAggregationsForFixedBucketCount(LongArray, int, BucketBuilderForFixedCount, Function)} + * or {@link #buildAggregationsForSingleBucket(LongArray, SingleBucketResultBuilder)} * instead of calling this directly. * @return the sub-aggregation results in the same order as the provided * array of ordinals */ - protected final IntFunction buildSubAggsForBuckets(long[] bucketOrdsToCollect) throws IOException { + protected final IntFunction buildSubAggsForBuckets(LongArray bucketOrdsToCollect) throws IOException { prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { - updateCircuitBreaker("building_sub_aggregation"); + checkRealMemoryCB("building_sub_aggregation"); aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect); } return subAggsForBucketFunction(aggregations); @@ -204,26 +203,28 @@ public int size() { * @param setAggs how to set the sub-aggregation results on a bucket */ protected final void buildSubAggsForAllBuckets( - B[][] buckets, + ObjectArray buckets, ToLongFunction bucketToOrd, BiConsumer setAggs ) throws IOException { - int totalBucketOrdsToCollect = 0; - for (B[] bucketsForOneResult : buckets) { - totalBucketOrdsToCollect += bucketsForOneResult.length; + long totalBucketOrdsToCollect = 0; + for (long b = 0; b < buckets.size(); b++) { + totalBucketOrdsToCollect += buckets.get(b).length; } - long[] bucketOrdsToCollect = new long[totalBucketOrdsToCollect]; - int s = 0; - for (B[] bucketsForOneResult : buckets) { - for (B bucket : bucketsForOneResult) { - bucketOrdsToCollect[s++] = bucketToOrd.applyAsLong(bucket); + + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalBucketOrdsToCollect)) { + int s = 0; + for (long ord = 0; ord < buckets.size(); ord++) { + for (B bucket : buckets.get(ord)) { + bucketOrdsToCollect.set(s++, bucketToOrd.applyAsLong(bucket)); + } } - } - var results = buildSubAggsForBuckets(bucketOrdsToCollect); - s = 0; - for (B[] bucket : buckets) { - for (int b = 0; b < bucket.length; b++) { - setAggs.accept(bucket[b], results.apply(s++)); + var results = buildSubAggsForBuckets(bucketOrdsToCollect); + s = 0; + for (long ord = 0; ord < buckets.size(); ord++) { + for (B value : buckets.get(ord)) { + setAggs.accept(value, results.apply(s++)); + } } } } @@ -237,37 +238,37 @@ protected final void buildSubAggsForAllBuckets( * @param resultBuilder how to build a result from buckets */ protected final InternalAggregation[] buildAggregationsForFixedBucketCount( - long[] owningBucketOrds, + LongArray owningBucketOrds, int bucketsPerOwningBucketOrd, BucketBuilderForFixedCount bucketBuilder, Function, InternalAggregation> resultBuilder ) throws IOException { - int totalBuckets = owningBucketOrds.length * bucketsPerOwningBucketOrd; - long[] 
bucketOrdsToCollect = new long[totalBuckets]; - int bucketOrdIdx = 0; - for (long owningBucketOrd : owningBucketOrds) { - long ord = owningBucketOrd * bucketsPerOwningBucketOrd; - for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - bucketOrdsToCollect[bucketOrdIdx++] = ord++; - } - } - bucketOrdIdx = 0; - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); - for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - buckets.add( - bucketBuilder.build( - offsetInOwningOrd, - bucketDocCount(bucketOrdsToCollect[bucketOrdIdx]), - subAggregationResults.apply(bucketOrdIdx++) - ) - ); + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(owningBucketOrds.size() * bucketsPerOwningBucketOrd)) { + final int[] bucketOrdIdx = new int[] { 0 }; + for (long i = 0; i < owningBucketOrds.size(); i++) { + long ord = owningBucketOrds.get(i) * bucketsPerOwningBucketOrd; + for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + bucketOrdsToCollect.set(bucketOrdIdx[0]++, ord++); + } } - results[owningOrdIdx] = resultBuilder.apply(buckets); + bucketOrdIdx[0] = 0; + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); + for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + checkRealMemoryCBForInternalBucket(); + buckets.add( + bucketBuilder.build( + offsetInOwningOrd, + bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx[0])), + subAggregationResults.apply(bucketOrdIdx[0]++) + ) + ); + } + return resultBuilder.apply(buckets); + }); } - return results; } @FunctionalInterface @@ -280,19 +281,20 @@ protected interface BucketBuilderForFixedCount { * @param owningBucketOrds owning bucket ordinals for which to build the results * @param resultBuilder how to build a result from the sub aggregation results */ - protected final InternalAggregation[] buildAggregationsForSingleBucket(long[] owningBucketOrds, SingleBucketResultBuilder resultBuilder) - throws IOException { + protected final InternalAggregation[] buildAggregationsForSingleBucket( + LongArray owningBucketOrds, + SingleBucketResultBuilder resultBuilder + ) throws IOException { /* * It'd be entirely reasonable to call * `consumeBucketsAndMaybeBreak(owningBucketOrds.length)` * here but we don't because single bucket aggs never have. 
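* (presumably: never have more than one bucket per owning ordinal, so there is nothing extra to account for)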
*/ var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], subAggregationResults.apply(ordIdx)); - } - return results; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)) + ); } @FunctionalInterface @@ -307,54 +309,59 @@ protected interface SingleBucketResultBuilder { * @param bucketOrds hash of values to the bucket ordinal */ protected final InternalAggregation[] buildAggregationsForVariableBuckets( - long[] owningBucketOrds, + LongArray owningBucketOrds, LongKeyedBucketOrds bucketOrds, BucketBuilderForVariable bucketBuilder, ResultBuilderForVariable resultBuilder ) throws IOException { long totalOrdsToCollect = 0; - final int[] bucketsInOrd = new int[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - bucketsInOrd[ordIdx] = (int) bucketCount; - totalOrdsToCollect += bucketCount; - } - if (totalOrdsToCollect > Integer.MAX_VALUE) { - // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is not - // much the user can do about that. If this occurs with any frequency, we should do something about it. - throw new IllegalArgumentException( - "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]" - ); - } - long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect]; - int b = 0; - for (long owningBucketOrd : owningBucketOrds) { - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - while (ordsEnum.next()) { - bucketOrdsToCollect[b++] = ordsEnum.ord(); + try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)); + bucketsInOrd.set(ordIdx, (int) bucketCount); + totalOrdsToCollect += bucketCount; } - } - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - b = 0; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - List buckets = new ArrayList<>(bucketsInOrd[ordIdx]); - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - if (bucketOrdsToCollect[b] != ordsEnum.ord()) { - // If we hit this, something has gone horribly wrong and we need to investigate - throw AggregationErrors.iterationOrderChangedWithoutMutating( - bucketOrds.toString(), - ordsEnum.ord(), - bucketOrdsToCollect[b] - ); + if (totalOrdsToCollect > Integer.MAX_VALUE) { + // TODO: We should instrument this error. While it is correct for it to be a 400 class IllegalArgumentException, there is + // not + // much the user can do about that. If this occurs with any frequency, we should do something about it. 
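+ // Results are still addressed by int indexes below (Math.toIntExact, subAggregationResults.apply), so the Integer.MAX_VALUE cap still applies even though the ordinals now live in a LongArray.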
+ throw new IllegalArgumentException( + "Can't collect more than [" + Integer.MAX_VALUE + "] buckets but attempted [" + totalOrdsToCollect + "]" + ); + } + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { + final int[] b = new int[] { 0 }; + for (long i = 0; i < owningBucketOrds.size(); i++) { + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); + while (ordsEnum.next()) { + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); + } } - buckets.add(bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++))); + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + if (bucketOrdsToCollect.get(b[0]) != ordsEnum.ord()) { + // If we hit this, something has gone horribly wrong and we need to investigate + throw AggregationErrors.iterationOrderChangedWithoutMutating( + bucketOrds.toString(), + ordsEnum.ord(), + bucketOrdsToCollect.get(b[0]) + ); + } + checkRealMemoryCBForInternalBucket(); + buckets.add( + bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b[0]++)) + ); + } + return resultBuilder.build(owningBucketOrd, buckets); + }); } - results[ordIdx] = resultBuilder.build(owningBucketOrds[ordIdx], buckets); } - return results; } @FunctionalInterface @@ -412,14 +419,9 @@ protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException docCountProvider.setLeafReaderContext(ctx); } - /** - * This method calls the circuit breaker from time to time in order to give it a chance to check available - * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. - * To achieve that, we are passing 0 as the estimated bytes every 1024 calls - */ - private void updateCircuitBreaker(String label) { - if ((++callCount & 0x3FF) == 0) { - breaker.addEstimateBytesAndMaybeBreak(0, label); - } + /** This method should be called whenever a new bucket object is created. It will check the real memory + * circuit breaker in a sampling fashion. 
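+ * Buckets are created in tight per-ordinal loops while building results, so a sampled check keeps the breaker overhead negligible.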
See {@link #checkRealMemoryCB(String)} */ + protected final void checkRealMemoryCBForInternalBucket() { + checkRealMemoryCB("internal_bucket"); + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java index 84a15b6d1c0eb..64744b705e222 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferableBucketAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BucketCollector; @@ -65,7 +66,7 @@ protected void doPreCollection() throws IOException { } deferredAggregations.add(subAggregators[i]); deferredAggregationNames.add(subAggregators[i].name()); - subAggregators[i] = deferringCollector.wrap(subAggregators[i]); + subAggregators[i] = deferringCollector.wrap(subAggregators[i], bigArrays()); } else { collectors.add(subAggregators[i]); } @@ -87,7 +88,7 @@ protected DeferringBucketCollector deferringCollector() { /** * Build the {@link DeferringBucketCollector}. The default implementation * replays all hits against the buckets selected by - {#link {@link DeferringBucketCollector#prepareSelectedBuckets(long...)}. + {@link DeferringBucketCollector#prepareSelectedBuckets(LongArray)}. */ protected DeferringBucketCollector buildDeferringCollector() { return new BestBucketsDeferringCollector(topLevelQuery(), searcher(), descendsFromGlobalAggregator(parent())); @@ -107,7 +108,7 @@ protected boolean shouldDefer(Aggregator aggregator) { } @Override - protected final void prepareSubAggs(long[] bucketOrdsToCollect) throws IOException { + protected final void prepareSubAggs(LongArray bucketOrdsToCollect) throws IOException { if (deferringCollector != null) { deferringCollector.prepareSelectedBuckets(bucketOrdsToCollect); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java index 44cff2651e273..468fec29a9420 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java @@ -10,6 +10,8 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.BucketCollector; @@ -37,13 +39,13 @@ public DeferringBucketCollector() {} /** * Replay the deferred hits on the selected buckets. */ - public abstract void prepareSelectedBuckets(long... selectedBuckets) throws IOException; + public abstract void prepareSelectedBuckets(LongArray selectedBuckets) throws IOException; /** * Wrap the provided aggregator so that it behaves (almost) as if it had * been collected directly.
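* The BigArrays parameter below lets wrappers rebase ordinals into a breaker-tracked LongArray, as BestBucketsDeferringCollector now does.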
*/ - public Aggregator wrap(final Aggregator in) { + public Aggregator wrap(final Aggregator in, BigArrays bigArrays) { return new WrappedAggregator(in); } @@ -80,7 +82,7 @@ public Aggregator subAggregator(String name) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return in.buildAggregations(owningBucketOrds); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 9ee15306ce636..0baecf6e3f92b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -35,6 +35,7 @@ import org.apache.lucene.util.RoaringDocIdSet; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexSortConfig; @@ -184,50 +185,51 @@ protected void doPostCollection() throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { // Composite aggregator must be at the top of the aggregation tree - assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0L; + assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0L; if (deferredCollectors != NO_OP_BUCKET_COLLECTOR) { // Replay all documents that contain at least one top bucket (collected during the first pass). runDeferredCollections(); } - int num = Math.min(size, (int) queue.size()); + final int num = Math.min(size, (int) queue.size()); final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num]; - long[] bucketOrdsToCollect = new long[(int) queue.size()]; - for (int i = 0; i < queue.size(); i++) { - bucketOrdsToCollect[i] = i; - } - var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect); - while (queue.size() > 0) { - int slot = queue.pop(); - CompositeKey key = queue.toCompositeKey(slot); - InternalAggregations aggs = subAggsForBuckets.apply(slot); - long docCount = queue.getDocCount(slot); - buckets[(int) queue.size()] = new InternalComposite.InternalBucket( - sourceNames, - formats, - key, - reverseMuls, - missingOrders, - docCount, - aggs - ); + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(queue.size())) { + for (int i = 0; i < queue.size(); i++) { + bucketOrdsToCollect.set(i, i); + } + var subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect); + while (queue.size() > 0) { + int slot = queue.pop(); + CompositeKey key = queue.toCompositeKey(slot); + InternalAggregations aggs = subAggsForBuckets.apply(slot); + long docCount = queue.getDocCount(slot); + buckets[(int) queue.size()] = new InternalComposite.InternalBucket( + sourceNames, + formats, + key, + reverseMuls, + missingOrders, + docCount, + aggs + ); + } + CompositeKey lastBucket = num > 0 ? 
buckets[num - 1].getRawKey() : null; + return new InternalAggregation[] { + new InternalComposite( + name, + size, + sourceNames, + formats, + Arrays.asList(buckets), + lastBucket, + reverseMuls, + missingOrders, + earlyTerminated, + metadata() + ) }; } - CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null; - return new InternalAggregation[] { - new InternalComposite( - name, - size, - sourceNames, - formats, - Arrays.asList(buckets), - lastBucket, - reverseMuls, - missingOrders, - earlyTerminated, - metadata() - ) }; } @Override @@ -244,6 +246,7 @@ public InternalAggregation buildEmptyAggregation() { false, metadata() ); + } private void finishLeaf() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index af4d60bf424a7..344b90b06c4f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -13,6 +13,8 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -108,70 +110,80 @@ private void collectOrdinal(long bucketOrdinal, int doc, LeafBucketCollector sub } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - StringTerms.Bucket[][] topBucketsPerOrd = new StringTerms.Bucket[owningBucketOrds.length][]; - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); - - // as users can't control sort order, in practice we'll always sort by doc count descending - try ( - BucketPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - StringTerms.Bucket spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier emptyBucketBuilder = () -> new StringTerms.Bucket(new BytesRef(), 0, null, false, 0, format); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (spare == null) { - spare = emptyBucketBuilder.get(); + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size()); + ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); + + // as users can't control sort order, in practice we'll always sort by doc count descending + try ( + BucketPriorityQueue ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + StringTerms.Bucket spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = 
bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + Supplier emptyBucketBuilder = () -> new StringTerms.Bucket( + new BytesRef(), + 0, + null, + false, + 0, + format + ); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + ordsEnum.readValue(spare.getTermBytes()); + spare.setDocCount(docCount); + spare.setBucketOrd(ordsEnum.ord()); + spare = ordered.insertWithOverflow(spare); } - ordsEnum.readValue(spare.getTermBytes()); - spare.setDocCount(docCount); - spare.setBucketOrd(ordsEnum.ord()); - spare = ordered.insertWithOverflow(spare); - } - topBucketsPerOrd[ordIdx] = new StringTerms.Bucket[(int) ordered.size()]; - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount(); - topBucketsPerOrd[ordIdx][i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd[ordIdx][i].getTermBytes())); + topBucketsPerOrd.set(ordIdx, new StringTerms.Bucket[(int) ordered.size()]); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); + topBucketsPerOrd.get(ordIdx)[i].setTermBytes(BytesRef.deepCopyOf(topBucketsPerOrd.get(ordIdx)[i].getTermBytes())); + } } } - } - buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final BucketOrder reduceOrder; - if (isKeyOrder(order) == false) { - reduceOrder = InternalOrder.key(true); - Arrays.sort(topBucketsPerOrd[ordIdx], reduceOrder.comparator()); - } else { - reduceOrder = order; - } - result[ordIdx] = new StringTerms( - name, - reduceOrder, - order, - bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), - metadata(), - format, - bucketCountThresholds.getShardSize(), - false, - otherDocCounts[ordIdx], - Arrays.asList(topBucketsPerOrd[ordIdx]), - null - ); + buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); + + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + final BucketOrder reduceOrder; + if (isKeyOrder(order) == false) { + reduceOrder = InternalOrder.key(true); + Arrays.sort(topBucketsPerOrd.get(ordIdx), reduceOrder.comparator()); + } else { + reduceOrder = order; + } + return new StringTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + format, + bucketCountThresholds.getShardSize(), + false, + otherDocCounts.get(ordIdx), + Arrays.asList(topBucketsPerOrd.get(ordIdx)), + null + ); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index fede97c7fddee..69eff3630a8f4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -20,6 +20,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -208,7 +209,7 @@ List filters() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, filters.size() + (otherBucketKey == null ? 0 : 1), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index cde26bb2214ed..1d3614af08768 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -132,39 +134,40 @@ public void collect(int doc, long owningBucketOrd) throws IOException { protected abstract InternalGeoGridBucket newEmptyBucket(); @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalGeoGridBucket[][] topBucketsPerOrd = new InternalGeoGridBucket[owningBucketOrds.length][]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]), shardSize); - - try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, bigArrays())) { - InternalGeoGridBucket spare = null; - LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - if (spare == null) { - spare = newEmptyBucket(); - } + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)), shardSize); + + try (BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, bigArrays())) { + InternalGeoGridBucket spare = null; + LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = newEmptyBucket(); + } - // need a special function to keep the source bucket - // up-to-date so it can get the appropriate key - spare.hashAsLong = ordsEnum.value(); - spare.docCount = bucketDocCount(ordsEnum.ord()); - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } + // need a special function to keep the source bucket + // up-to-date so it can get the appropriate key + 
spare.hashAsLong = ordsEnum.value(); + spare.docCount = bucketDocCount(ordsEnum.ord()); + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } - topBucketsPerOrd[ordIdx] = new InternalGeoGridBucket[(int) ordered.size()]; - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); + topBucketsPerOrd.set(ordIdx, new InternalGeoGridBucket[(int) ordered.size()]); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); + } } } + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()) + ); } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd[ordIdx]), metadata()); - } - return results; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index b5d3485e72f82..b83001c34377e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.Weight; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; @@ -62,8 +63,8 @@ public void setScorer(Scorable scorer) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - assert owningBucketOrds.length == 1 && owningBucketOrds[0] == 0 : "global aggregator can only be a top level aggregator"; + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + assert owningBucketOrds.size() == 1 && owningBucketOrds.get(0) == 0 : "global aggregator can only be a top level aggregator"; return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalGlobal( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index b81d8b002b6b2..ed687df6377dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; import 
org.elasticsearch.search.aggregations.Aggregator; @@ -79,7 +80,7 @@ public AbstractHistogramAggregator( } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { double roundKey = Double.longBitsToDouble(bucketValue); double key = roundKey * interval + offset; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 86c320d8dc319..cc2db63fa5ec5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -17,6 +17,7 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.Rounding.DateTimeUnit; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; @@ -337,7 +338,7 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java index 2bfd85e5fe03a..f385f7c34f6b7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Rounding; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; @@ -163,7 +164,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets( owningBucketOrds, bucketOrds, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java index 1afb06067f770..86ec1666e2cea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.util.InPlaceMergeSorter; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -565,34 +566,35 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { int numClusters = collector.finalNumBuckets(); - long[] bucketOrdsToCollect = new long[numClusters]; - for (int i = 0; i < numClusters; i++) { - bucketOrdsToCollect[i] = i; - } + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(numClusters)) { + for (int i = 0; i < numClusters; i++) { + bucketOrdsToCollect.set(i, i); + } - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - List buckets = new ArrayList<>(numClusters); - for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) { - buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd))); - } + List buckets = new ArrayList<>(numClusters); + for (int bucketOrd = 0; bucketOrd < numClusters; bucketOrd++) { + buckets.add(collector.buildBucket(bucketOrd, subAggregationResults.apply(bucketOrd))); + } - Function, InternalAggregation> resultBuilder = bucketsToFormat -> { - // The contract of the histogram aggregation is that shards must return - // buckets ordered by centroid in ascending order - CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator()); + Function, InternalAggregation> resultBuilder = bucketsToFormat -> { + // The contract of the histogram aggregation is that shards must return + // buckets ordered by centroid in ascending order + CollectionUtil.introSort(bucketsToFormat, BucketOrder.key(true).comparator()); - InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo( - buildEmptySubAggregations() - ); + InternalVariableWidthHistogram.EmptyBucketInfo emptyBucketInfo = new InternalVariableWidthHistogram.EmptyBucketInfo( + buildEmptySubAggregations() + ); - return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata()); - }; + return new InternalVariableWidthHistogram(name, bucketsToFormat, emptyBucketInfo, numBuckets, formatter, metadata()); + }; - return new InternalAggregation[] { resultBuilder.apply(buckets) }; + return new InternalAggregation[] { resultBuilder.apply(buckets) }; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java index 5c8f8ab9c562e..b49668e45b889 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.search.aggregations.bucket.missing; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.DocValueBits; import 
org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -67,7 +68,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalMissing( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 0fbb9745aa400..23a2d6380c290 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -124,7 +125,7 @@ private void processBufferedDocs() throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalNested( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 0e3e4679c7a2d..2477b67367e14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.NestedObjectMapper; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; @@ -86,7 +87,7 @@ public void collect(int childDoc, long bucket) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalReverseNested( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java index 9548cd871e161..e3192e9b2fa16 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java @@ -12,6 
+12,8 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -160,57 +162,62 @@ private static void maskIpAddress(final BytesRef ipAddress, final BytesRef subne } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { long totalOrdsToCollect = 0; - final int[] bucketsInOrd = new int[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - bucketsInOrd[ordIdx] = (int) bucketCount; - totalOrdsToCollect += bucketCount; - } - - long[] bucketOrdsToCollect = new long[(int) totalOrdsToCollect]; - int b = 0; - for (long owningBucketOrd : owningBucketOrds) { - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); - while (ordsEnum.next()) { - bucketOrdsToCollect[b++] = ordsEnum.ord(); + try (IntArray bucketsInOrd = bigArrays().newIntArray(owningBucketOrds.size())) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long bucketCount = bucketOrds.bucketsInOrd(owningBucketOrds.get(ordIdx)); + bucketsInOrd.set(ordIdx, (int) bucketCount); + totalOrdsToCollect += bucketCount; } - } - var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - b = 0; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - List buckets = new ArrayList<>(bucketsInOrd[ordIdx]); - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long ordinal = ordsEnum.ord(); - if (bucketOrdsToCollect[b] != ordinal) { - throw AggregationErrors.iterationOrderChangedWithoutMutating(bucketOrds.toString(), ordinal, bucketOrdsToCollect[b]); + try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { + int[] b = new int[] { 0 }; + for (long i = 0; i < owningBucketOrds.size(); i++) { + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); + while (ordsEnum.next()) { + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); + } } - BytesRef ipAddress = new BytesRef(); - ordsEnum.readValue(ipAddress); - long docCount = bucketDocCount(ordinal); - buckets.add( - new InternalIpPrefix.Bucket( - config.format(), - BytesRef.deepCopyOf(ipAddress), - keyed, - ipPrefix.isIpv6, - ipPrefix.prefixLength, - ipPrefix.appendPrefixLength, - docCount, - subAggregationResults.apply(b++) - ) - ); - - // NOTE: the aggregator is expected to return sorted results - CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); + + var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); + while (ordsEnum.next()) { + long ordinal = ordsEnum.ord(); + if (bucketOrdsToCollect.get(b[0]) != ordinal) { + throw 
AggregationErrors.iterationOrderChangedWithoutMutating( + bucketOrds.toString(), + ordinal, + bucketOrdsToCollect.get(b[0]) + ); + } + BytesRef ipAddress = new BytesRef(); + ordsEnum.readValue(ipAddress); + long docCount = bucketDocCount(ordinal); + checkRealMemoryCBForInternalBucket(); + buckets.add( + new InternalIpPrefix.Bucket( + config.format(), + BytesRef.deepCopyOf(ipAddress), + keyed, + ipPrefix.isIpv6, + ipPrefix.prefixLength, + ipPrefix.appendPrefixLength, + docCount, + subAggregationResults.apply(b[0]++) + ) + ); + + // NOTE: the aggregator is expected to return sorted results + CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); + } + return new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); + }); } - results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); } - return results; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 6119af3cb6a57..9bde8d007c1b7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; @@ -359,7 +360,7 @@ private interface DocCollector { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, ranges.length, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 6d63bb786c29f..0654a788a10a9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; @@ -531,7 +532,7 @@ protected long subBucketOrdinal(long owningBucketOrdinal, int rangeOrd) { @Override @SuppressWarnings("unchecked") - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForFixedBucketCount( owningBucketOrds, ranges.length, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 
37cee75c11b48..70f72fafba7b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -19,6 +19,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -120,7 +121,7 @@ public void postCollection() throws IOException { } @Override - public void prepareSelectedBuckets(long... selectedBuckets) throws IOException { + public void prepareSelectedBuckets(LongArray selectedBuckets) { // no-op - deferred aggs processed in postCollection call } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 78b2cdfe7655d..a4c06a194fbf7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -11,6 +11,7 @@ import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.aggregations.AggregationExecutionContext; @@ -212,7 +213,7 @@ protected boolean shouldDefer(Aggregator aggregator) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalSampler( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java index fc03786356f87..699b8c6b5d500 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java @@ -9,18 +9,23 @@ package org.elasticsearch.search.aggregations.bucket.sampler.random; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; -import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.CardinalityUpperBound; import 
org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
 import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
 import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -33,14 +38,13 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single

     private final int seed;
     private final Integer shardSeed;
     private final double probability;
-    private final CheckedSupplier<Weight, IOException> weightSupplier;
+    private Weight weight;

     RandomSamplerAggregator(
         String name,
         int seed,
         Integer shardSeed,
         double probability,
-        CheckedSupplier<Weight, IOException> weightSupplier,
         AggregatorFactories factories,
         AggregationContext context,
         Aggregator parent,
@@ -55,12 +59,35 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single
                 RandomSamplerAggregationBuilder.NAME + " aggregation [" + name + "] must have sub aggregations configured"
             );
         }
-        this.weightSupplier = weightSupplier;
         this.shardSeed = shardSeed;
     }

+    /**
+     * This creates the query weight which will be used in the aggregator.
+     *
+     * This weight is a boolean query that combines the {@link RandomSamplingQuery} with the configured top level query of
+     * the search. This allows the aggregation to iterate the documents directly, thus sampling in the background instead
+     * of the foreground.
+     * @return the weight to be used; it is cached for subsequent calls
+     * @throws IOException if building or rewriting the queries fails
+     */
+    private Weight getWeight() throws IOException {
+        if (weight == null) {
+            ScoreMode scoreMode = scoreMode();
+            // scores are only kept when a sub-aggregation needs them; otherwise the cheaper FILTER clause suffices
+            BooleanQuery.Builder fullQuery = new BooleanQuery.Builder().add(
+                context.query(),
+                scoreMode.needsScores() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER
+            );
+            if (probability < 1.0) {
+                Query sampleQuery = new RandomSamplingQuery(probability, seed, shardSeed == null ?
context.shardRandomSeed() : shardSeed); + fullQuery.add(sampleQuery, BooleanClause.Occur.FILTER); + } + weight = context.searcher().createWeight(context.searcher().rewrite(fullQuery.build()), scoreMode, 1f); + } + return weight; + } + @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForSingleBucket( owningBucketOrds, (owningBucketOrd, subAggregationResults) -> new InternalRandomSampler( @@ -100,22 +127,26 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt if (sub.isNoop()) { return LeafBucketCollector.NO_OP_COLLECTOR; } + + Scorer scorer = getWeight().scorer(aggCtx.getLeafReaderContext()); + // This means there are no docs to iterate, possibly due to the fields not existing + if (scorer == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + sub.setScorer(scorer); + // No sampling is being done, collect all docs + // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353 if (probability >= 1.0) { grow(1); - return new LeafBucketCollector() { + return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { collectExistingBucket(sub, doc, 0); } }; } - // TODO know when sampling would be much slower and skip sampling: https://github.com/elastic/elasticsearch/issues/84353 - Scorer scorer = weightSupplier.get().scorer(aggCtx.getLeafReaderContext()); - // This means there are no docs to iterate, possibly due to the fields not existing - if (scorer == null) { - return LeafBucketCollector.NO_OP_COLLECTOR; - } + final DocIdSetIterator docIt = scorer.iterator(); final Bits liveDocs = aggCtx.getLeafReaderContext().reader().getLiveDocs(); try { @@ -135,5 +166,4 @@ public void collect(int doc, long owningBucketOrd) throws IOException { // Since we have done our own collection, there is nothing for the leaf collector to do return LeafBucketCollector.NO_OP_COLLECTOR; } - } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java index 67c958046dac7..50921501896d3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java @@ -9,10 +9,6 @@ package org.elasticsearch.search.aggregations.bucket.sampler.random; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -30,7 +26,6 @@ public class RandomSamplerAggregatorFactory extends AggregatorFactory { private final Integer shardSeed; private final double probability; private final SamplingContext samplingContext; - private Weight weight; RandomSamplerAggregatorFactory( String name, @@ -57,41 +52,6 @@ public Optional getSamplingContext() { @Override public Aggregator createInternal(Aggregator parent, CardinalityUpperBound 
cardinality, Map metadata) throws IOException { - return new RandomSamplerAggregator( - name, - seed, - shardSeed, - probability, - this::getWeight, - factories, - context, - parent, - cardinality, - metadata - ); + return new RandomSamplerAggregator(name, seed, shardSeed, probability, factories, context, parent, cardinality, metadata); } - - /** - * This creates the query weight which will be used in the aggregator. - * - * This weight is a boolean query between {@link RandomSamplingQuery} and the configured top level query of the search. This allows - * the aggregation to iterate the documents directly, thus sampling in the background instead of the foreground. - * @return weight to be used, is cached for additional usages - * @throws IOException when building the weight or queries fails; - */ - private Weight getWeight() throws IOException { - if (weight == null) { - RandomSamplingQuery query = new RandomSamplingQuery( - probability, - seed, - shardSeed == null ? context.shardRandomSeed() : shardSeed - ); - BooleanQuery booleanQuery = new BooleanQuery.Builder().add(query, BooleanClause.Occur.FILTER) - .add(context.query(), BooleanClause.Occur.FILTER) - .build(); - weight = context.searcher().createWeight(context.searcher().rewrite(booleanQuery), ScoreMode.COMPLETE_NO_SCORES, 1f); - } - return weight; - } - } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 0f7c61dc9f25b..db9da6ed67207 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -191,7 +192,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -696,61 +697,66 @@ abstract class ResultStrategy< B extends InternalMultiBucketAggregation.InternalBucket, TB extends InternalMultiBucketAggregation.InternalBucket> implements Releasable { - private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + if (valueCount == 0) { // no context in this reader - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildNoValuesResult(owningBucketOrds[ordIdx]); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildNoValuesResult(owningBucketOrds.get(ordIdx)) + ); } - - B[][] topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.length); - long[] otherDocCount = new 
long[owningBucketOrds.length];
-        GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd;
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            final int size;
-            if (bucketCountThresholds.getMinDocCount() == 0) {
-                // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns
-                size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
-            } else {
-                size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
-            }
-            try (ObjectArrayPriorityQueue<TB> ordered = buildPriorityQueue(size)) {
-                final int finalOrdIdx = ordIdx;
-                BucketUpdater<TB> updater = bucketUpdater(owningBucketOrds[ordIdx], lookupGlobalOrd);
-                collectionStrategy.forEach(owningBucketOrds[ordIdx], new BucketInfoConsumer() {
-                    TB spare = null;
-
-                    @Override
-                    public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException {
-                        otherDocCount[finalOrdIdx] += docCount;
-                        if (docCount >= bucketCountThresholds.getShardMinDocCount()) {
-                            if (spare == null) {
-                                spare = buildEmptyTemporaryBucket();
+        try (
+            LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true);
+            ObjectArray<B[]> topBucketsPreOrd = buildTopBucketsPerOrd(owningBucketOrds.size())
+        ) {
+            GlobalOrdLookupFunction lookupGlobalOrd = valuesSupplier.get()::lookupOrd;
+            for (long ordIdx = 0; ordIdx < topBucketsPreOrd.size(); ordIdx++) {
+                final int size;
+                if (bucketCountThresholds.getMinDocCount() == 0) {
+                    // if minDocCount == 0 then we can end up with more buckets than maxBucketOrd() returns
+                    size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize());
+                } else {
+                    size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize());
+                }
+                try (ObjectArrayPriorityQueue<TB> ordered = buildPriorityQueue(size)) {
+                    final long finalOrdIdx = ordIdx;
+                    final long owningBucketOrd = owningBucketOrds.get(ordIdx);
+                    BucketUpdater<TB> updater = bucketUpdater(owningBucketOrd, lookupGlobalOrd);
+                    collectionStrategy.forEach(owningBucketOrd, new BucketInfoConsumer() {
+                        TB spare = null;
+
+                        @Override
+                        public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException {
+                            otherDocCount.increment(finalOrdIdx, docCount);
+                            if (docCount >= bucketCountThresholds.getShardMinDocCount()) {
+                                if (spare == null) {
+                                    checkRealMemoryCBForInternalBucket();
+                                    spare = buildEmptyTemporaryBucket();
+                                }
+                                updater.updateBucket(spare, globalOrd, bucketOrd, docCount);
+                                spare = ordered.insertWithOverflow(spare);
                             }
-                            updater.updateBucket(spare, globalOrd, bucketOrd, docCount);
-                            spare = ordered.insertWithOverflow(spare);
                         }
+                    });
+
+                    // Get the top buckets
+                    topBucketsPreOrd.set(ordIdx, buildBuckets((int) ordered.size()));
+                    for (int i = (int) ordered.size() - 1; i >= 0; --i) {
+                        checkRealMemoryCBForInternalBucket();
+                        B bucket = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd);
+                        topBucketsPreOrd.get(ordIdx)[i] = bucket;
+                        otherDocCount.increment(ordIdx, -bucket.getDocCount());
                     }
-                });
-
-                // Get the top buckets
-                topBucketsPreOrd[ordIdx] = buildBuckets((int) ordered.size());
-                for (int i = (int) ordered.size() - 1; i >= 0; --i) {
-                    topBucketsPreOrd[ordIdx][i] = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd);
-                    otherDocCount[ordIdx] -= topBucketsPreOrd[ordIdx][i].getDocCount();
                 }
             }
-        }
-        buildSubAggs(topBucketsPreOrd);
+            buildSubAggs(topBucketsPreOrd);

-        InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length];
-        for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) {
-            results[ordIdx] =
buildResult(owningBucketOrds[ordIdx], otherDocCount[ordIdx], topBucketsPreOrd[ordIdx]); + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)) + ); } - return results; } /** @@ -785,7 +791,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal to collect the @@ -802,7 +808,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. */ - abstract void buildSubAggs(B[][] topBucketsPreOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException; /** * Turn the buckets into an aggregation result. @@ -841,8 +847,8 @@ LeafBucketCollector wrapCollector(LeafBucketCollector primary) { } @Override - StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new StringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -879,7 +885,7 @@ StringTerms.Bucket convertTempBucketToRealBucket(OrdBucket temp, GlobalOrdLookup } @Override - void buildSubAggs(StringTerms.Bucket[][] topBucketsPreOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -973,8 +979,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantStringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -1026,7 +1032,7 @@ SignificantStringTerms.Bucket convertTempBucketToRealBucket( } @Override - void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPreOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPreOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPreOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index a60911b466847..eeb7305ac51fa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -62,7 +62,7 @@ public interface Reader> { long supersetSize; /** * Ordinal of the bucket while it is being built. Not used after it is - * returned from {@link Aggregator#buildAggregations(long[])} and not + * returned from {@link Aggregator#buildAggregations(org.elasticsearch.common.util.LongArray)} and not * serialized. 
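+     * On the shard that built the bucket it is used to look up the bucket's
+     * sub-aggregation results while the shard response is being assembled.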
*/ transient long bucketOrd; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java index 651705bd71ef8..45ea1245ec38d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java @@ -12,7 +12,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; @@ -118,70 +120,67 @@ private void collectValue(long val, int docId, long owningBucketOrd, LeafBucketC } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Collect the list of buckets, populate the filter with terms * that are too frequent, and figure out how to merge sub-buckets. */ - LongRareTerms.Bucket[][] rarestPerOrd = new LongRareTerms.Bucket[owningBucketOrds.length][]; - SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length]; - long keepCount = 0; - long[] mergeMap = new long[(int) bucketOrds.size()]; - Arrays.fill(mergeMap, -1); - long offset = 0; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) { - filters[owningOrdIdx] = newFilter(); - List builtBuckets = new ArrayList<>(); - LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]); - while (collectedBuckets.next()) { - long docCount = bucketDocCount(collectedBuckets.ord()); - // if the key is below threshold, reinsert into the new ords - if (docCount <= maxDocCount) { - LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); - bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); - mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd; - builtBuckets.add(bucket); - keepCount++; - } else { - filters[owningOrdIdx].add(collectedBuckets.value()); + try ( + ObjectArray rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()); + ObjectArray filters = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) { + mergeMap.fill(0, mergeMap.size(), -1); + long keepCount = 0; + long offset = 0; + for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) { + try (LongHash bucketsInThisOwningBucketToCollect = new LongHash(1, bigArrays())) { + filters.set(owningOrdIdx, newFilter()); + List builtBuckets = new ArrayList<>(); + LongKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx)); + while (collectedBuckets.next()) { + long docCount = bucketDocCount(collectedBuckets.ord()); + // if the key is below threshold, reinsert into the new ords + if (docCount <= maxDocCount) 
{ + checkRealMemoryCBForInternalBucket(); + LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); + bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); + mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); + builtBuckets.add(bucket); + keepCount++; + } else { + filters.get(owningOrdIdx).add(collectedBuckets.value()); + } + } + rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(LongRareTerms.Bucket[]::new)); + offset += bucketsInThisOwningBucketToCollect.size(); } } - rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(LongRareTerms.Bucket[]::new); - offset += bucketsInThisOwningBucketToCollect.size(); - } - } - /* - * Only merge/delete the ordinals if we have actually deleted one, - * to save on some redundant work. - */ - if (keepCount != mergeMap.length) { - LongUnaryOperator howToMerge = b -> mergeMap[(int) b]; - rewriteBuckets(offset, howToMerge); - if (deferringCollector() != null) { - ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + /* + * Only merge/delete the ordinals if we have actually deleted one, + * to save on some redundant work. + */ + if (keepCount != mergeMap.size()) { + LongUnaryOperator howToMerge = mergeMap::get; + rewriteBuckets(offset, howToMerge); + if (deferringCollector() != null) { + ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + } + } } - } - /* - * Now build the results! - */ - buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator()); - result[ordIdx] = new LongRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(rarestPerOrd[ordIdx]), - maxDocCount, - filters[ordIdx] - ); + /* + * Now build the results! 
+ */ + buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + + return LongRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + LongRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); + Arrays.sort(buckets, ORDER.comparator()); + return new LongRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index 76202b6386a73..6ae47d5975479 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -18,6 +18,7 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -117,7 +118,7 @@ public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -282,45 +283,50 @@ abstract class ResultStrategy ordered = buildPriorityQueue(size)) { - B spare = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = emptyBucketBuilder.get(); + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(Math.toIntExact(owningBucketOrds.size())) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + long owningOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningOrd, excludeDeletedDocs); + int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); + + try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { + B spare = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningOrd); + Supplier emptyBucketBuilder = emptyBucketBuilder(owningOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + updateBucket(spare, ordsEnum, docCount); + spare = ordered.insertWithOverflow(spare); } - updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } - topBucketsPerOrd[ordIdx] = buildBuckets((int) 
ordered.size()); - for (int i = (int) ordered.size() - 1; i >= 0; --i) { - topBucketsPerOrd[ordIdx][i] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][i].getDocCount(); - finalizeBucket(topBucketsPerOrd[ordIdx][i]); + topBucketsPerOrd.set(ordIdx, buildBuckets((int) ordered.size())); + for (int i = (int) ordered.size() - 1; i >= 0; --i) { + topBucketsPerOrd.get(ordIdx)[i] = ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[i].getDocCount()); + finalizeBucket(topBucketsPerOrd.get(ordIdx)[i]); + } } } - } - buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + buildSubAggs(topBucketsPerOrd); + + return MapStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } /** @@ -361,7 +367,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal to collect the @@ -379,7 +385,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. */ - abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; /** * Turn the buckets into an aggregation result. 
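
Note on the recurring pattern in the aggregator hunks above and below: per-ordinal scratch buffers move from plain Java arrays (B[][], long[]) to BigArrays-backed ObjectArray/LongArray buffers that are sized with a long, released via try-with-resources, and accounted against the circuit breaker; the added checkRealMemoryCBForInternalBucket() calls also run a real-memory breaker check before each per-bucket allocation. A minimal, self-contained sketch of that lifecycle follows; ToyObjectArray is a hypothetical stand-in for org.elasticsearch.common.util.ObjectArray, not the real class.

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical stand-in for org.elasticsearch.common.util.ObjectArray: the
    // real class is allocated through BigArrays, which charges every page
    // against a circuit breaker; close() returns the reserved bytes.
    final class ToyObjectArray<T> implements AutoCloseable {
        private final Object[] slots;

        ToyObjectArray(long size) {
            this.slots = new Object[Math.toIntExact(size)];
        }

        @SuppressWarnings("unchecked")
        T get(long index) {
            return (T) slots[Math.toIntExact(index)];
        }

        void set(long index, T value) {
            slots[Math.toIntExact(index)] = value;
        }

        long size() {
            return slots.length;
        }

        @Override
        public void close() {
            // the real implementation releases the breaker reservation here
        }
    }

    public class TopBucketsPerOrdSketch {
        record Bucket(long term, long docCount) {}

        // Before the change: Bucket[][] topBucketsPerOrd = new Bucket[(int) ords][];
        // After: a closeable, long-indexed buffer that cannot leak on exception.
        static List<String> buildResults(long owningBucketOrds) {
            List<String> results = new ArrayList<>();
            try (ToyObjectArray<Bucket[]> topBucketsPerOrd = new ToyObjectArray<>(owningBucketOrds)) {
                for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
                    // the PR calls checkRealMemoryCBForInternalBucket() at this
                    // point, before allocating each per-ordinal bucket
                    topBucketsPerOrd.set(ordIdx, new Bucket[] { new Bucket(ordIdx, 1) });
                }
                for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) {
                    results.add("ord " + ordIdx + " -> " + topBucketsPerOrd.get(ordIdx).length + " bucket(s)");
                }
            } // buffer released even if result building throws
            return results;
        }

        public static void main(String[] args) {
            buildResults(3).forEach(System.out::println);
        }
    }
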
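The rare-terms hunks (LongRareTermsAggregator above, StringRareTermsAggregator below) share one algorithm: buckets at or below maxDocCount are reinserted under compacted ordinals recorded in mergeMap, too-frequent terms go into the cuckoo filter instead, and the ordinal rewrite is only performed when keepCount shows that at least one bucket was dropped. A toy sketch of that control flow, with plain arrays standing in for the LongHash/LongArray structures used in the diff:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.function.LongUnaryOperator;

    public class RareTermsPruneSketch {
        public static void main(String[] args) {
            long[] docCountByOrd = { 1, 7, 2, 9, 1 }; // indexed by old bucket ordinal
            long maxDocCount = 2;

            long[] mergeMap = new long[docCountByOrd.length];
            Arrays.fill(mergeMap, -1); // -1 marks "bucket was pruned"
            List<Long> frequentTerms = new ArrayList<>(); // stands in for the cuckoo filter
            long keepCount = 0;
            long nextOrd = 0;

            for (int oldOrd = 0; oldOrd < docCountByOrd.length; oldOrd++) {
                if (docCountByOrd[oldOrd] <= maxDocCount) {
                    mergeMap[oldOrd] = nextOrd++; // keep: reinsert under a compact ordinal
                    keepCount++;
                } else {
                    frequentTerms.add((long) oldOrd); // too common to be "rare"
                }
            }

            // Only pay for the ordinal rewrite when something was actually dropped.
            if (keepCount != mergeMap.length) {
                LongUnaryOperator howToMerge = oldOrd -> mergeMap[(int) oldOrd];
                System.out.println("old ord 4 rewrites to " + howToMerge.applyAsLong(4));
            }
            System.out.println("kept=" + keepCount + " filtered ords=" + frequentTerms);
        }
    }
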
@@ -501,8 +507,8 @@ void updateBucket(StringTerms.Bucket spare, BytesKeyedBucketOrds.BucketOrdsEnum } @Override - StringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new StringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -521,7 +527,7 @@ void finalizeBucket(StringTerms.Bucket bucket) { } @Override - void buildSubAggs(StringTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); } @@ -637,8 +643,8 @@ void updateBucket(SignificantStringTerms.Bucket spare, BytesKeyedBucketOrds.Buck } @Override - SignificantStringTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantStringTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -657,7 +663,7 @@ void finalizeBucket(SignificantStringTerms.Bucket bucket) { } @Override - void buildSubAggs(SignificantStringTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index d39348d80df14..ce89b95b76a05 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -39,7 +40,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.function.BiConsumer; import java.util.function.Function; @@ -136,7 +136,7 @@ private void collectValue(long val, int doc, long owningBucketOrd, LeafBucketCol } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return resultStrategy.buildAggregations(owningBucketOrds); } @@ -163,48 +163,52 @@ public void collectDebugInfo(BiConsumer add) { abstract class ResultStrategy implements Releasable { - private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - B[][] topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.length); - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - collectZeroDocEntriesIfNeeded(owningBucketOrds[ordIdx], excludeDeletedDocs); - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { - B spare = null; - BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - Supplier 
emptyBucketBuilder = emptyBucketBuilder(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = emptyBucketBuilder.get(); + private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = buildTopBucketsPerOrd(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < topBucketsPerOrd.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + collectZeroDocEntriesIfNeeded(owningBucketOrd, excludeDeletedDocs); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + try (ObjectArrayPriorityQueue ordered = buildPriorityQueue(size)) { + B spare = null; + BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + Supplier emptyBucketBuilder = emptyBucketBuilder(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = emptyBucketBuilder.get(); + } + updateBucket(spare, ordsEnum, docCount); + spare = ordered.insertWithOverflow(spare); } - updateBucket(spare, ordsEnum, docCount); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - B[] bucketsForOrd = buildBuckets((int) ordered.size()); - topBucketsPerOrd[ordIdx] = bucketsForOrd; - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd[ordIdx][b] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + // Get the top buckets + B[] bucketsForOrd = buildBuckets((int) ordered.size()); + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + for (int b = (int) ordered.size() - 1; b >= 0; --b) { + topBucketsPerOrd.get(ordIdx)[b] = ordered.pop(); + otherDocCounts.increment(ordIdx, -topBucketsPerOrd.get(ordIdx)[b].getDocCount()); + } } } - } - buildSubAggs(topBucketsPerOrd); + buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + return NumericTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } /** @@ -227,7 +231,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws /** * Build an array to hold the "top" buckets for each ordinal. */ - abstract B[][] buildTopBucketsPerOrd(int size); + abstract ObjectArray buildTopBucketsPerOrd(long size); /** * Build an array of buckets for a particular ordinal. These arrays @@ -258,7 +262,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws * Build the sub-aggregations into the buckets. This will usually * delegate to {@link #buildSubAggsForAllBuckets}. 
*/ - abstract void buildSubAggs(B[][] topBucketsPerOrd) throws IOException; + abstract void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException; /** * Collect extra entries for "zero" hit documents if they were requested @@ -297,7 +301,7 @@ final ObjectArrayPriorityQueue buildPriorityQueue(int size) { } @Override - final void buildSubAggs(B[][] topBucketsPerOrd) throws IOException { + final void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -356,8 +360,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException { } @Override - LongTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new LongTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -397,7 +401,7 @@ LongTerms buildResult(long owningBucketOrd, long otherDocCount, LongTerms.Bucket bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), null ); } @@ -438,8 +442,8 @@ SortedNumericDocValues getValues(LeafReaderContext ctx) throws IOException { } @Override - DoubleTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new DoubleTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -479,7 +483,7 @@ DoubleTerms buildResult(long owningBucketOrd, long otherDocCount, DoubleTerms.Bu bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), null ); } @@ -551,8 +555,8 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } @Override - SignificantLongTerms.Bucket[][] buildTopBucketsPerOrd(int size) { - return new SignificantLongTerms.Bucket[size][]; + ObjectArray buildTopBucketsPerOrd(long size) { + return bigArrays().newObjectArray(size); } @Override @@ -583,7 +587,7 @@ ObjectArrayPriorityQueue buildPriorityQueue(int siz } @Override - void buildSubAggs(SignificantLongTerms.Bucket[][] topBucketsPerOrd) throws IOException { + void buildSubAggs(ObjectArray topBucketsPerOrd) throws IOException { buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); } @@ -601,7 +605,7 @@ SignificantLongTerms buildResult(long owningBucketOrd, long otherDocCoun, Signif subsetSizes.get(owningBucketOrd), supersetSize, significanceHeuristic, - List.of(topBuckets) + Arrays.asList(topBuckets) ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java index 2bc2833f0ddce..8a2c9d52f4212 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java @@ -12,6 +12,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.SetBackedScalingCuckooFilter; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.fielddata.FieldData; @@ -119,72 +121,74 @@ private void collectValue(BytesRef val, int doc, long 
owningBucketOrd, LeafBucke } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { /* * Collect the list of buckets, populate the filter with terms * that are too frequent, and figure out how to merge sub-buckets. */ - StringRareTerms.Bucket[][] rarestPerOrd = new StringRareTerms.Bucket[owningBucketOrds.length][]; - SetBackedScalingCuckooFilter[] filters = new SetBackedScalingCuckooFilter[owningBucketOrds.length]; - long keepCount = 0; - long[] mergeMap = new long[(int) bucketOrds.size()]; - Arrays.fill(mergeMap, -1); - long offset = 0; - for (int owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.length; owningOrdIdx++) { - try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) { - filters[owningOrdIdx] = newFilter(); - List builtBuckets = new ArrayList<>(); - BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds[owningOrdIdx]); - BytesRef scratch = new BytesRef(); - while (collectedBuckets.next()) { - collectedBuckets.readValue(scratch); - long docCount = bucketDocCount(collectedBuckets.ord()); - // if the key is below threshold, reinsert into the new ords - if (docCount <= maxDocCount) { - StringRareTerms.Bucket bucket = new StringRareTerms.Bucket(BytesRef.deepCopyOf(scratch), docCount, null, format); - bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch); - mergeMap[(int) collectedBuckets.ord()] = bucket.bucketOrd; - builtBuckets.add(bucket); - keepCount++; - } else { - filters[owningOrdIdx].add(scratch); + try ( + ObjectArray rarestPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()); + ObjectArray filters = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + try (LongArray mergeMap = bigArrays().newLongArray(bucketOrds.size())) { + mergeMap.fill(0, mergeMap.size(), -1); + long keepCount = 0; + long offset = 0; + for (long owningOrdIdx = 0; owningOrdIdx < owningBucketOrds.size(); owningOrdIdx++) { + try (BytesRefHash bucketsInThisOwningBucketToCollect = new BytesRefHash(1, bigArrays())) { + filters.set(owningOrdIdx, newFilter()); + List builtBuckets = new ArrayList<>(); + BytesKeyedBucketOrds.BucketOrdsEnum collectedBuckets = bucketOrds.ordsEnum(owningBucketOrds.get(owningOrdIdx)); + BytesRef scratch = new BytesRef(); + while (collectedBuckets.next()) { + collectedBuckets.readValue(scratch); + long docCount = bucketDocCount(collectedBuckets.ord()); + // if the key is below threshold, reinsert into the new ords + if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); + StringRareTerms.Bucket bucket = new StringRareTerms.Bucket( + BytesRef.deepCopyOf(scratch), + docCount, + null, + format + ); + bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(scratch); + mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); + builtBuckets.add(bucket); + keepCount++; + } else { + filters.get(owningOrdIdx).add(scratch); + } + } + rarestPerOrd.set(owningOrdIdx, builtBuckets.toArray(StringRareTerms.Bucket[]::new)); + offset += bucketsInThisOwningBucketToCollect.size(); } } - rarestPerOrd[owningOrdIdx] = builtBuckets.toArray(StringRareTerms.Bucket[]::new); - offset += bucketsInThisOwningBucketToCollect.size(); - } - } - /* - * Only merge/delete the ordinals if we have actually deleted one, - * to save on some redundant work. 
- */ - if (keepCount != mergeMap.length) { - LongUnaryOperator howToMerge = b -> mergeMap[(int) b]; - rewriteBuckets(offset, howToMerge); - if (deferringCollector() != null) { - ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + /* + * Only merge/delete the ordinals if we have actually deleted one, + * to save on some redundant work. + */ + if (keepCount != mergeMap.size()) { + LongUnaryOperator howToMerge = mergeMap::get; + rewriteBuckets(offset, howToMerge); + if (deferringCollector() != null) { + ((BestBucketsDeferringCollector) deferringCollector()).rewriteBuckets(howToMerge); + } + } } - } - /* - * Now build the results! - */ - buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - Arrays.sort(rarestPerOrd[ordIdx], ORDER.comparator()); - result[ordIdx] = new StringRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(rarestPerOrd[ordIdx]), - maxDocCount, - filters[ordIdx] - ); + /* + * Now build the results! + */ + buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + + return StringRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { + StringRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); + Arrays.sort(buckets, ORDER.comparator()); + return new StringRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } - return result; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java index 8742136c86ec6..cf65f1ff7c835 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.metrics; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorBase; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -36,11 +37,7 @@ protected MetricsAggregator(String name, AggregationContext context, Aggregator public abstract InternalAggregation buildAggregation(long owningBucketOrd) throws IOException; @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - results[ordIdx] = buildAggregation(owningBucketOrds[ordIdx]); - } - return results; + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildAggregation(owningBucketOrds.get(ordIdx))); } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java index fff1990c29750..90e84acc7cad5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java +++ 
b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.profile.aggregation; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -68,7 +69,7 @@ public BucketComparator bucketComparator(String key, SortOrder order) { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { Timer timer = profileBreakdown.getNewTimer(AggregationTimingType.BUILD_AGGREGATION); InternalAggregation[] result; timer.start(); diff --git a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java index 7bf172388eccd..0db3de9abdb7b 100644 --- a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java +++ b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; import java.io.IOException; @@ -22,7 +21,7 @@ * A specialized, bytes only request, that can potentially be optimized on the network * layer, specifically for the same large buffer send to several nodes. */ -public class BytesTransportRequest extends TransportRequest implements RefCounted { +public class BytesTransportRequest extends TransportRequest { final ReleasableBytesReference bytes; private final TransportVersion version; diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 9863d2156422d..f5a23cf68a26e 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -509,12 +510,12 @@ public void testGetDetailedMessage() { public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("test"); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ {"type":"exception","reason":"test"}"""); } { ElasticsearchException e = new IndexShardRecoveringException(new ShardId("_test", "_0", 5)); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "index_shard_recovering_exception", "reason": "CurrentState[RECOVERING] Already recovering", @@ -529,7 +530,7 @@ public void testToXContent() throws IOException { "foo", new IllegalStateException("bar") ); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "illegal_state_exception", "reason": "bar" @@ -537,7 +538,7 @@ public void testToXContent() throws IOException { } { ElasticsearchException e = new ElasticsearchException(new 
IllegalArgumentException("foo")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": "java.lang.IllegalArgumentException: foo", @@ -552,7 +553,7 @@ public void testToXContent() throws IOException { "foo", new ElasticsearchException("bar", new IllegalArgumentException("index is closed", new RuntimeException("foobar"))) ); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ { "type": "exception", "reason": "foo", @@ -573,7 +574,7 @@ public void testToXContent() throws IOException { { ElasticsearchException e = new ElasticsearchException("foo", new IllegalStateException("bar")); - assertExceptionAsJson(e, """ + assertThrowableAsJson(e, """ { "type": "exception", "reason": "foo", @@ -602,21 +603,91 @@ public void testToXContent() throws IOException { } } + public void testGenerateFailureToXContentWithNoDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found"}}""", false); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar"}}""", false); + } + + { // header and metadata shouldn't be rendered + ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expected = """ + {"error":{"type": "parsing_exception","reason": "foobar"}}"""; + assertFailureAsJson(ex, expected, false); + } + } + + public void testGenerateFailureToXContentWithDetails() throws IOException { + { + Exception ex = new FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); + } + assertFailureAsJson(ex, """ + {"error":{"type":"file_not_found_exception","reason":"foo not found", + "root_cause":[{"type":"file_not_found_exception","reason":"foo not found"}]}}""", true); + } + { + ParsingException ex = new ParsingException(1, 2, "foobar", null); + assertFailureAsJson(ex, """ + {"error":{"type":"parsing_exception","reason":"foobar","line":1,"col":2, + "root_cause":[{"type":"parsing_exception","reason":"foobar","line":1,"col":2}]}}""", true); + } + + { // render header and metadata + ParsingException ex = new ParsingException(1, 2, "foobar", null); + ex.addMetadata("es.test1", "value1"); + ex.addMetadata("es.test2", "value2"); + ex.addHeader("test", "some value"); + ex.addHeader("test_multi", "some value", "another value"); + + String expectedFragment = """ + { + "type": "parsing_exception", + "reason": "foobar", + "line": 1, + "col": 2, + "test1": "value1", + "test2": "value2", + "header": { + "test_multi": [ + "some value", + "another value" + ], + "test": "some value" + } + """; + String expected = "{\"error\":" + expectedFragment + ",\"root_cause\":[" + expectedFragment + "}]}}"; + assertFailureAsJson(ex, expected, true); + } + } + public void testGenerateThrowableToXContent() throws IOException { { - Exception ex; - if (randomBoolean()) { - // just a wrapper which is omitted - ex = new RemoteTransportException("foobar", new FileNotFoundException("foo not found")); - } else { - ex = new FileNotFoundException("foo not found"); + Exception ex = new 
FileNotFoundException("foo not found"); + for (int i = 0; i < randomInt(10); i++) { + ex = new RemoteTransportException("foobar", ex); } - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"file_not_found_exception","reason":"foo not found"}"""); } { ParsingException ex = new ParsingException(1, 2, "foobar", null); - assertExceptionAsJson(ex, """ + assertThrowableAsJson(ex, """ {"type":"parsing_exception","reason":"foobar","line":1,"col":2}"""); } @@ -656,7 +727,7 @@ public void testGenerateThrowableToXContent() throws IOException { "test": "some value" } }"""; - assertExceptionAsJson(ex, expected); + assertThrowableAsJson(ex, expected); } } @@ -697,7 +768,7 @@ public void testToXContentWithHeadersAndMetadata() throws IOException { } }"""; - assertExceptionAsJson(e, expectedJson); + assertThrowableAsJson(e, expectedJson); ElasticsearchException parsed; try (XContentParser parser = createParser(XContentType.JSON.xContent(), expectedJson)) { @@ -859,7 +930,7 @@ public void testFromXContentWithHeadersAndMetadata() throws IOException { } assertNotNull(parsed); - assertEquals(parsed.getMessage(), "Elasticsearch exception [type=exception, reason=foo]"); + assertEquals("Elasticsearch exception [type=exception, reason=foo]", parsed.getMessage()); assertThat(parsed.getHeaderKeys(), hasSize(1)); assertThat(parsed.getHeader("foo_1"), hasItem("foo1")); assertThat(parsed.getMetadataKeys(), hasSize(1)); @@ -996,11 +1067,40 @@ public void testThrowableToAndFromXContent() throws IOException { public void testUnknownFailureToAndFromXContent() throws IOException { final XContent xContent = randomFrom(XContentType.values()).xContent(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - // Prints a null failure using generateFailureXContent() - ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + + // Failure was null, expecting a "unknown" reason + assertEquals("Elasticsearch exception [type=unknown, reason=unknown]", parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + } + + public void testUnknownFailureToAndFromXContentV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + // Prints a null failure using generateFailureXContent() + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, null, randomBoolean()), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); ElasticsearchException parsedFailure; try (XContentParser parser = createParser(xContent, failureBytes)) { @@ -1021,10 +1121,46 
@@ public void testFailureToAndFromXContentWithNoDetails() throws IOException { final XContent xContent = randomFrom(XContentType.values()).xContent(); final Exception failure = (Exception) randomExceptions().v1(); - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, failure, false); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); + + try (XContentParser parser = createParser(xContent, failureBytes)) { + failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); + } + + ElasticsearchException parsedFailure; + try (XContentParser parser = createParser(xContent, failureBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + parsedFailure = ElasticsearchException.failureFromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + assertNotNull(parsedFailure); + + String type = ElasticsearchException.getExceptionName(failure); + String reason = failure.getMessage(); + assertEquals(ElasticsearchException.buildMessage(type, reason, null), parsedFailure.getMessage()); + assertEquals(0, parsedFailure.getHeaders().size()); + assertEquals(0, parsedFailure.getMetadata().size()); + assertNull(parsedFailure.getCause()); + } + + public void testFailureToAndFromXContentWithNoDetailsV8() throws IOException { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + + final Exception failure = (Exception) randomExceptions().v1(); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, failure, false), + xContent.type(), + RestApiVersion.V_8, + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1165,10 +1301,12 @@ public void testFailureToAndFromXContentWithDetails() throws IOException { } Exception finalFailure = failure; - BytesReference failureBytes = toShuffledXContent((builder, params) -> { - ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true); - return builder; - }, xContent.type(), ToXContent.EMPTY_PARAMS, randomBoolean()); + BytesReference failureBytes = toShuffledXContent( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, finalFailure, true), + xContent.type(), + ToXContent.EMPTY_PARAMS, + randomBoolean() + ); try (XContentParser parser = createParser(xContent, failureBytes)) { failureBytes = BytesReference.bytes(shuffleXContent(parser, randomBoolean())); @@ -1197,13 +1335,20 @@ private static void assertToXContentAsJson(ToXContent e, String expectedJson) th assertToXContentEquivalent(new BytesArray(expectedJson), actual, XContentType.JSON); } - private static void assertExceptionAsJson(Exception e, String expectedJson) throws IOException { + private static void assertThrowableAsJson(Throwable e, String expectedJson) throws IOException { assertToXContentAsJson((builder, params) -> { ElasticsearchException.generateThrowableXContent(builder, params, e); return 
builder; }, expectedJson); } + private static void assertFailureAsJson(Exception e, String expectedJson, boolean detailed) throws IOException { + assertToXContentAsJson( + (builder, params) -> ElasticsearchException.generateFailureXContent(builder, params, e, detailed), + expectedJson + ); + } + public static void assertDeepEquals(ElasticsearchException expected, ElasticsearchException actual) { do { if (expected == null) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 7a31f0dcb4631..a7058e5d6cd8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1057,6 +1057,8 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java index e9188d9cb8f0d..a72630c327ea2 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/CCSTelemetrySnapshotTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.admin.cluster.stats.CCSTelemetrySnapshot.PerClusterCCSTelemetry; import org.elasticsearch.action.admin.cluster.stats.LongMetric.LongMetricValue; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Tuple; @@ -32,9 +33,13 @@ public class CCSTelemetrySnapshotTests extends AbstractWireSerializingTestCase { private LongMetricValue randomLongMetricValue() { + return randomLongMetricValueBetween(0, 1_000_000); + } + + private LongMetricValue randomLongMetricValueBetween(int low, int high) { LongMetric v = new LongMetric(); for (int i = 0; i < randomIntBetween(5, 10); i++) { - v.record(randomIntBetween(0, 1_000_000)); + v.record(randomIntBetween(low, high)); } return v.getValue(); } @@ -330,4 +335,21 @@ private String readJSONFromResource(String fileName) throws IOException { return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8); } } + + public void testRanges() throws IOException { + var value1 = randomLongMetricValueBetween(1_000_000, 10_000_000); + var count1 = value1.count(); + var max1 = value1.max(); + var output = new BytesStreamOutput(); + value1.writeTo(output); + var value1Read = LongMetricValue.fromStream(output.bytes().streamInput()); + var value2 = randomLongMetricValueBetween(0, 100); + var count2 = value2.count(); + output = new BytesStreamOutput(); + value2.writeTo(output); + var value2Read = LongMetricValue.fromStream(output.bytes().streamInput()); + value2Read.add(value1Read); + assertThat(value2Read.count(), equalTo(count1 + count2)); + assertThat(value2Read.max(), equalTo(max1)); + } } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java 
b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java new file mode 100644 index 0000000000000..fe839c28aab88 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamResponseTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamResponse::new; + } + + @Override + protected ReindexDataStreamResponse createTestInstance() { + return new ReindexDataStreamResponse(randomAlphaOfLength(40)); + } + + @Override + protected ReindexDataStreamResponse mutateInstance(ReindexDataStreamResponse instance) { + return createTestInstance(); + } + + public void testToXContent() throws IOException { + ReindexDataStreamResponse response = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), equalTo(Map.of("task", response.getTaskId()))); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java index 4e6b2b17b2554..61284a49b2502 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java @@ -79,7 +79,7 @@ public void testXContentDeserialization() throws IOException { assertEquals(actualPipelines.size(), parsedPipelines.size()); for (PipelineConfiguration pipeline : parsedPipelines) { assertTrue(pipelinesMap.containsKey(pipeline.getId())); - assertEquals(pipelinesMap.get(pipeline.getId()).getConfigAsMap(), pipeline.getConfigAsMap()); + assertEquals(pipelinesMap.get(pipeline.getId()).getConfig(), pipeline.getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index f8ecdbd062054..725a4583d104a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -101,7 +101,7 @@ protected SearchPhase getNextPhase() { @Override protected void executePhaseOnShard( final SearchShardIterator shardIt, - final SearchShardTarget shard, + final Transport.Connection shard, final SearchActionListener listener ) {} diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 03c5d0a06f6fb..484b3c6b386fd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -147,7 +147,7 @@ public void executeNextPhase(SearchPhase currentPhase, Supplier nex @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection shard, SearchActionListener listener ) { onShardResult(new SearchPhaseResult() { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index f655136cd4ba4..b4ddd48172d01 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; @@ -119,16 +118,15 @@ public void testSkipSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per replica return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -227,23 +225,22 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { safeAwait(awaitInitialRequests); - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() ); try { - if (shardFailures[shard.getShardId().id()]) { + if (shardFailures[shardIt.shardId().id()]) { listener.onFailure(new RuntimeException()); } else { listener.onResponse(testSearchPhaseResult); @@ -340,11 +337,11 @@ public 
void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", testResponse.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", testResponse.queried.add(shardId)); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -464,13 +461,13 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - assertTrue("shard: " + shard.getShardId() + " has been queried twice", response.queried.add(shard.getShardId())); - Transport.Connection connection = getConnection(null, shard.getNodeId()); + var shardId = shardIt.shardId(); + assertTrue("shard: " + shardId + " has been queried twice", response.queried.add(shardId)); final TestSearchPhaseResult testSearchPhaseResult; - if (shard.getShardId().id() == 0) { + if (shardId.id() == 0) { testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); } else { testSearchPhaseResult = new TestSearchPhaseResult( @@ -573,15 +570,14 @@ public void testAllowPartialResults() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { - seenShard.computeIfAbsent(shard.getShardId(), (i) -> { + seenShard.computeIfAbsent(shardIt.shardId(), (i) -> { numRequests.incrementAndGet(); // only count this once per shard copy return Boolean.TRUE; }); new Thread(() -> { - Transport.Connection connection = getConnection(null, shard.getNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode() @@ -673,7 +669,7 @@ public void testSkipUnavailableSearchShards() throws InterruptedException { @Override protected void executePhaseOnShard( SearchShardIterator shardIt, - SearchShardTarget shard, + Transport.Connection connection, SearchActionListener listener ) { assert false : "Expected to skip all shards"; diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java index a1b9c59571496..303b75098ab67 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymRuleActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "testSet", "synonymRuleId", "testRule")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> 
action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java index 4dce73fcf0e89..915c338195c86 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/PutSynonymsActionTests.java @@ -26,7 +26,7 @@ public void testEmptyRequestBody() throws Exception { .withParams(Map.of("synonymsSet", "test")) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 0); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 0); try (var threadPool = createThreadPool()) { final var nodeClient = new NoOpNodeClient(threadPool); expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, channel, nodeClient)); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 4518bd655346a..226f5dbf3b2ff 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -22,10 +22,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistryTests; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -38,6 +42,7 @@ import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TestTransportChannel; @@ -49,6 +54,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -155,6 +161,7 @@ public void doRun() { final var joinValidationService = new JoinValidationService( settings, transportService, + writableRegistry(), () -> usually() ? 
clusterState : null, clusterState::metadata, List.of() @@ -286,7 +293,14 @@ public void writeTo(StreamOutput out) {} ); // registers request handler - new JoinValidationService(Settings.EMPTY, joiningNodeTransportService, () -> clusterState, clusterState::metadata, List.of()); + new JoinValidationService( + Settings.EMPTY, + joiningNodeTransportService, + writableRegistry(), + () -> clusterState, + clusterState::metadata, + List.of() + ); joiningNodeTransportService.start(); joiningNodeTransportService.acceptIncomingRequests(); @@ -325,6 +339,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req final var joinValidationService = new JoinValidationService( Settings.EMPTY, masterTransportService, + writableRegistry(), () -> clusterState, clusterState::metadata, List.of() @@ -349,7 +364,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req } } - public void testJoinValidationRejectsMismatchedClusterUUID() { + public void testJoinValidationRejectsMismatchedClusterUUID() throws IOException { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); final var localNode = DiscoveryNodeUtils.create("node0"); @@ -371,7 +386,14 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { final var settings = Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), dataPath).build(); // registers request handler - new JoinValidationService(settings, transportService, () -> localClusterState, localClusterState::metadata, List.of()); + new JoinValidationService( + settings, + transportService, + writableRegistry(), + () -> localClusterState, + localClusterState::metadata, + List.of() + ); transportService.start(); transportService.acceptIncomingRequests(); @@ -384,7 +406,7 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(otherClusterState), + serializeClusterState(otherClusterState), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -401,6 +423,22 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { ); } + private static BytesTransportRequest serializeClusterState(ClusterState clusterState) { + try ( + var bytesStream = new BytesStreamOutput(); + var compressedStream = new OutputStreamStreamOutput( + CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream)) + ) + ) { + compressedStream.setTransportVersion(TransportVersion.current()); + clusterState.writeTo(compressedStream); + compressedStream.flush(); + return new BytesTransportRequest(ReleasableBytesReference.wrap(bytesStream.bytes()), TransportVersion.current()); + } catch (Exception e) { + throw new AssertionError(e); + } + } + public void testJoinValidationRunsJoinValidators() { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); @@ -420,11 +458,12 @@ public void testJoinValidationRunsJoinValidators() { new JoinValidationService( Settings.EMPTY, transportService, + writableRegistry(), () -> localClusterState, localClusterState::metadata, List.of((node, state) -> { assertSame(node, localNode); - assertSame(state, stateForValidation); + assertEquals(state.stateUUID(), stateForValidation.stateUUID()); throw new IllegalStateException("simulated validation failure"); }) 
); // registers request handler @@ -435,7 +474,7 @@ public void testJoinValidationRunsJoinValidators() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(stateForValidation), + serializeClusterState(stateForValidation), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -467,9 +506,16 @@ protected void onSendRequest(long requestId, String action, TransportRequest req null, Collections.emptySet() ); - final var joinValidationService = new JoinValidationService(Settings.EMPTY, masterTransportService, () -> null, () -> { - throw new AssertionError("should not be called"); - }, List.of()); + final var joinValidationService = new JoinValidationService( + Settings.EMPTY, + masterTransportService, + writableRegistry(), + () -> null, + () -> { + throw new AssertionError("should not be called"); + }, + List.of() + ); masterTransportService.start(); masterTransportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java b/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java deleted file mode 100644 index 00cfac7248da6..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/features/NodeFeaturesFixupListenerTests.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */
-
-package org.elasticsearch.cluster.features;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.features.NodeFeatures;
-import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesRequest;
-import org.elasticsearch.action.admin.cluster.node.features.NodesFeaturesResponse;
-import org.elasticsearch.action.admin.cluster.node.features.TransportNodesFeaturesAction;
-import org.elasticsearch.client.internal.ClusterAdminClient;
-import org.elasticsearch.cluster.ClusterChangedEvent;
-import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesTask;
-import org.elasticsearch.cluster.features.NodeFeaturesFixupListener.NodesFeaturesUpdater;
-import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.node.VersionInformation;
-import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils;
-import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
-import org.elasticsearch.common.transport.TransportAddress;
-import org.elasticsearch.index.IndexVersion;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.Scheduler;
-import org.mockito.ArgumentCaptor;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Executor;
-
-import static org.elasticsearch.test.LambdaMatchers.transformedMatch;
-import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.ArgumentMatchers.same;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.mockito.hamcrest.MockitoHamcrest.argThat;
-
-public class NodeFeaturesFixupListenerTests extends ESTestCase {
-
-    @SuppressWarnings("unchecked")
-    private static MasterServiceTaskQueue<NodesFeaturesTask> newMockTaskQueue() {
-        return mock(MasterServiceTaskQueue.class);
-    }
-
-    private static DiscoveryNodes nodes(Version... versions) {
-        var builder = DiscoveryNodes.builder();
-        for (int i = 0; i < versions.length; i++) {
-            builder.add(DiscoveryNodeUtils.create("node" + i, new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i), versions[i]));
-        }
-        builder.localNodeId("node0").masterNodeId("node0");
-        return builder.build();
-    }
-
-    private static DiscoveryNodes nodes(VersionInformation... versions) {
-        var builder = DiscoveryNodes.builder();
-        for (int i = 0; i < versions.length; i++) {
-            builder.add(
-                DiscoveryNodeUtils.builder("node" + i)
-                    .address(new TransportAddress(TransportAddress.META_ADDRESS, 9200 + i))
-                    .version(versions[i])
-                    .build()
-            );
-        }
-        builder.localNodeId("node0").masterNodeId("node0");
-        return builder.build();
-    }
-
-    @SafeVarargs
-    private static Map<String, Set<String>> features(Set<String>... nodeFeatures) {
-        Map<String, Set<String>> features = new HashMap<>();
-        for (int i = 0; i < nodeFeatures.length; i++) {
-            features.put("node" + i, nodeFeatures[i]);
-        }
-        return features;
-    }
-
-    private static NodesFeaturesResponse getResponse(Map<String, Set<String>> responseData) {
-        return new NodesFeaturesResponse(
-            ClusterName.DEFAULT,
-            responseData.entrySet()
-                .stream()
-                .map(
-                    e -> new NodeFeatures(
-                        e.getValue(),
-                        DiscoveryNodeUtils.create(e.getKey(), new TransportAddress(TransportAddress.META_ADDRESS, 9200))
-                    )
-                )
-                .toList(),
-            List.of()
-        );
-    }
-
-    public void testNothingDoneWhenNothingToFix() {
-        MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue();
-        ClusterAdminClient client = mock(ClusterAdminClient.class);
-
-        ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(nodes(Version.CURRENT, Version.CURRENT))
-            .nodeFeatures(features(Set.of("f1", "f2"), Set.of("f1", "f2")))
-            .build();
-
-        NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null);
-        listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE));
-
-        verify(taskQueue, never()).submitTask(anyString(), any(), any());
-    }
-
-    public void testFeaturesFixedAfterNewMaster() throws Exception {
-        MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue();
-        ClusterAdminClient client = mock(ClusterAdminClient.class);
-        Set<String> features = Set.of("f1", "f2");
-
-        ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT))
-            .nodeFeatures(features(features, Set.of(), Set.of()))
-            .build();
-
-        ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor();
-        ArgumentCaptor<NodesFeaturesTask> task = ArgumentCaptor.captor();
-
-        NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null);
-        listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE));
-        verify(client).execute(
-            eq(TransportNodesFeaturesAction.TYPE),
-            argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))),
-            action.capture()
-        );
-
-        action.getValue().onResponse(getResponse(Map.of("node1", features, "node2", features)));
-        verify(taskQueue).submitTask(anyString(), task.capture(), any());
-
-        ClusterState newState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful(
-            testState,
-            new NodesFeaturesUpdater(),
-            List.of(task.getValue())
-        );
-
-        assertThat(newState.clusterFeatures().allNodeFeatures(), containsInAnyOrder("f1", "f2"));
-    }
-
-    public void testFeaturesFetchedOnlyForUpdatedNodes() {
-        MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue();
-        ClusterAdminClient client = mock(ClusterAdminClient.class);
-
-        ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(
-                nodes(
-                    VersionInformation.CURRENT,
-                    VersionInformation.CURRENT,
-                    new VersionInformation(Version.V_8_12_0, IndexVersion.current(), IndexVersion.current())
-                )
-            )
-            .nodeFeatures(features(Set.of("f1", "f2"), Set.of(), Set.of()))
-            .build();
-
-        ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor();
-
-        NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, null, null);
-        listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE));
-        verify(client).execute(
-            eq(TransportNodesFeaturesAction.TYPE),
-            argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1"))),
-            action.capture()
-        );
-    }
-
-    public void testConcurrentChangesDoNotOverlap() {
-        MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue();
-        ClusterAdminClient client = mock(ClusterAdminClient.class);
-        Set<String> features = Set.of("f1", "f2");
-
-        ClusterState testState1 = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT))
-            .nodeFeatures(features(features, Set.of(), Set.of()))
-            .build();
-
-        NodeFeaturesFixupListener listeners = new NodeFeaturesFixupListener(taskQueue, client, null, null);
-        listeners.clusterChanged(new ClusterChangedEvent("test", testState1, ClusterState.EMPTY_STATE));
-        verify(client).execute(
-            eq(TransportNodesFeaturesAction.TYPE),
-            argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))),
-            any()
-        );
-        // don't send back the response yet
-
-        ClusterState testState2 = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT))
-            .nodeFeatures(features(features, features, Set.of()))
-            .build();
-        // should not send any requests
-        listeners.clusterChanged(new ClusterChangedEvent("test", testState2, testState1));
-        verifyNoMoreInteractions(client);
-    }
-
-    public void testFailedRequestsAreRetried() {
-        MasterServiceTaskQueue<NodesFeaturesTask> taskQueue = newMockTaskQueue();
-        ClusterAdminClient client = mock(ClusterAdminClient.class);
-        Scheduler scheduler = mock(Scheduler.class);
-        Executor executor = mock(Executor.class);
-        Set<String> features = Set.of("f1", "f2");
-
-        ClusterState testState = ClusterState.builder(ClusterState.EMPTY_STATE)
-            .nodes(nodes(Version.CURRENT, Version.CURRENT, Version.CURRENT))
-            .nodeFeatures(features(features, Set.of(), Set.of()))
-            .build();
-
-        ArgumentCaptor<ActionListener<NodesFeaturesResponse>> action = ArgumentCaptor.captor();
-        ArgumentCaptor<Runnable> retry = ArgumentCaptor.forClass(Runnable.class);
-
-        NodeFeaturesFixupListener listener = new NodeFeaturesFixupListener(taskQueue, client, scheduler, executor);
-        listener.clusterChanged(new ClusterChangedEvent("test", testState, ClusterState.EMPTY_STATE));
-        verify(client).execute(
-            eq(TransportNodesFeaturesAction.TYPE),
-            argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))),
-            action.capture()
-        );
-
-        action.getValue().onFailure(new RuntimeException("failure"));
-        verify(scheduler).schedule(retry.capture(), any(), same(executor));
-
-        // running the retry should cause another call
-        retry.getValue().run();
-        verify(client, times(2)).execute(
-            eq(TransportNodesFeaturesAction.TYPE),
-            argThat(transformedMatch(NodesFeaturesRequest::nodesIds, arrayContainingInAnyOrder("node1", "node2"))),
-            action.capture()
-        );
-    }
-}
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
index 6be5b48f9d723..57c360dc6a92a 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java
@@ -10,163 +10,90 @@
 package org.elasticsearch.cluster.metadata;
 
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver;
-import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel;
 import org.elasticsearch.test.ESTestCase;
-import org.hamcrest.Matchers;
 
 import java.time.Instant;
 import java.time.ZoneId;
 import java.time.ZoneOffset;
 import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
 import java.util.Locale;
+import java.util.function.LongSupplier;
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 
 public class DateMathExpressionResolverTests extends ESTestCase {
 
-    private final Context context = new Context(
-        ClusterState.builder(new ClusterName("_name")).build(),
-        IndicesOptions.strictExpand(),
-        SystemIndexAccessLevel.NONE
-    );
+    private final long now = randomMillisUpToYear9999();
+    private final LongSupplier getTime = () -> now;
 
-    private static ZonedDateTime dateFromMillis(long millis) {
-        return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC);
-    }
+    public void testNoDateMathExpression() {
+        String expression = randomAlphaOfLength(10);
+        assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression));
 
-    private static String formatDate(String pattern, ZonedDateTime zonedDateTime) {
-        DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT);
-        return dateFormatter.format(zonedDateTime);
+        expression = "*";
+        assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression));
     }
 
-    public void testNormal() throws Exception {
-        int numIndexExpressions = randomIntBetween(1, 9);
-        List<String> indexExpressions = new ArrayList<>(numIndexExpressions);
-        for (int i = 0; i < numIndexExpressions; i++) {
-            indexExpressions.add(randomAlphaOfLength(10));
-        }
-        List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
-        assertThat(result.size(), equalTo(indexExpressions.size()));
-        for (int i = 0; i < indexExpressions.size(); i++) {
-            assertThat(result.get(i), equalTo(indexExpressions.get(i)));
-        }
-    }
+    public void testExpression() {
+        String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now}>", getTime);
+        assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
 
-    public void testExpression() throws Exception {
-        List<String> indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", "<logstash-{now/d}>");
-        List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
-        assertThat(result.size(), equalTo(3));
-        assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
-        assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
-        assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+        result = DateMathExpressionResolver.resolveExpression("<.watch_history-{now}>", getTime);
+        assertThat(result, equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
+
+        result = DateMathExpressionResolver.resolveExpression("<logstash-{now/d}>", getTime);
+        assertThat(result, equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
     }
 
     public void testExpressionWithWildcardAndExclusions() {
-        List<String> indexExpressions = Arrays.asList(
-            "<-before-inner-{now}>",
-            "-<before-outer-{now}>",
-            "<wild*card-{now}*>",
-            "<-after-inner-{now}>",
-            "-<after-outer-{now}>"
-        );
-        List<String> result = DateMathExpressionResolver.resolve(context, indexExpressions);
-        assertThat(
-            result,
-            Matchers.contains(
-                equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
-                equalTo("-<before-outer-{now}>"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion
-                equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"),
-                equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
-                equalTo("-after-outer-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))
-            )
-        );
-        Context noWildcardExpandContext = new Context(
-            ClusterState.builder(new ClusterName("_name")).build(),
-            IndicesOptions.strictSingleIndexNoExpandForbidClosed(),
-            SystemIndexAccessLevel.NONE
-        );
-        result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions);
-        assertThat(
-            result,
-            Matchers.contains(
-                equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
-                // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion
-                equalTo("-<before-outer-{now}>"),
-                equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"),
-                equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))),
-                // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion
-                equalTo("-<after-outer-{now}>")
-            )
-        );
-    }
+        String result = DateMathExpressionResolver.resolveExpression("<-before-inner-{now}>", getTime);
+        assertThat(result, equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
+
+        result = DateMathExpressionResolver.resolveExpression("<wild*card-{now}*>", getTime);
+        assertThat(result, equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "*"));
+
+        result = DateMathExpressionResolver.resolveExpression("<-after-inner-{now}>", getTime);
+        assertThat(result, equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
 
-    public void testEmpty() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(context, Collections.emptyList());
-        assertThat(result.size(), equalTo(0));
     }
 
-    public void testExpression_Static() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>"));
-        assertThat(result.size(), equalTo(1));
-        assertThat(result.get(0), equalTo(".marvel-test"));
+    public void testExpression_Static() {
+        String result = DateMathExpressionResolver.resolveExpression("<.marvel-test>", getTime);
+        assertThat(result, equalTo(".marvel-test"));
     }
 
-    public void testExpression_MultiParts() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>"));
-        assertThat(result.size(), equalTo(1));
+    public void testExpression_MultiParts() {
+        String result = DateMathExpressionResolver.resolveExpression("<.text1-{now/d}-text2-{now/M}>", getTime);
         assertThat(
-            result.get(0),
+            result,
             equalTo(
                 ".text1-"
-                    + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))
+                    + formatDate("uuuu.MM.dd", dateFromMillis(now))
                     + "-text2-"
-                    + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1))
+                    + formatDate("uuuu.MM.dd", dateFromMillis(now).withDayOfMonth(1))
             )
         );
     }
 
-    public void testExpression_CustomFormat() throws Exception {
-        List<String> results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>"));
-        assertThat(results.size(), equalTo(1));
-        assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
-    }
-
-    public void testExpression_EscapeStatic() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>"));
-        assertThat(result.size(), equalTo(1));
-        assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
+    public void testExpression_CustomFormat() {
+        String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{yyyy.MM.dd}}>", getTime);
+        assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
    }
 
-    public void testExpression_EscapeDateFormat() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>"));
-        assertThat(result.size(), equalTo(1));
-        assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime()))));
+    public void testExpression_EscapeStatic() {
+        String result = DateMathExpressionResolver.resolveExpression("<.mar\\{v\\}el-{now/d}>", getTime);
+        assertThat(result, equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(now))));
     }
 
-    public void testExpression_MixedArray() throws Exception {
-        List<String> result = DateMathExpressionResolver.resolve(
-            context,
-            Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>")
-        );
-        assertThat(result.size(), equalTo(4));
-        assertThat(result.get(0), equalTo("name1"));
-        assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))));
-        assertThat(result.get(2), equalTo("name2"));
-        assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1))));
+    public void testExpression_EscapeDateFormat() {
+        String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{'\\{year\\}'yyyy}}>", getTime);
+        assertThat(result, equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(now))));
     }
 
-    public void testExpression_CustomTimeZoneInIndexName() throws Exception {
+    public void testExpression_CustomTimeZoneInIndexName() {
         ZoneId timeZone;
         int hoursOffset;
         int minutesOffset = 0;
@@ -194,57 +121,57 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception {
             // rounding to today 00:00
             now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0);
         }
-        Context context = new Context(
-            this.context.getState(),
-            this.context.getOptions(),
-            now.toInstant().toEpochMilli(),
-            SystemIndexAccessLevel.NONE,
-            name -> false,
-            name -> false
-        );
-        List<String> results = DateMathExpressionResolver.resolve(
-            context,
-            Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>")
+
+        String result = DateMathExpressionResolver.resolveExpression(
+            "<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>",
+            () -> now.toInstant().toEpochMilli()
         );
-        assertThat(results.size(), equalTo(1));
-        logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0));
-        assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone))));
+        logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, result);
+        assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone))));
     }
 
-    public void testExpressionInvalidUnescaped() throws Exception {
+    public void 
testExpressionInvalidUnescaped() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolveExpression("<.mar}vel-{now/d}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); } - public void testExpressionInvalidDateMathFormat() throws Exception { + public void testExpressionInvalidDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - public void testExpressionInvalidEmptyDateMathFormat() throws Exception { + public void testExpressionInvalidEmptyDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); } - public void testExpressionInvalidOpenEnded() throws Exception { + public void testExpressionInvalidOpenEnded() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + static String formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java index 41651d52ceb9f..0e4b8271ceac7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodeTests.java @@ -185,38 +185,6 @@ public void testNodeCPUsRoundUp() { } } - public void testDesiredNodeHasRangeFloatProcessors() { - final var settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); - - { - final var desiredNode = new DesiredNode( - settings, - new DesiredNode.ProcessorsRange(0.4, 1.2), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode( - settings, - randomIntBetween(0, 10) + randomDoubleBetween(0.00001, 0.99999, true), - ByteSizeValue.ofGb(1), - ByteSizeValue.ofGb(1) - ); - 
assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(false)); - } - - { - final var desiredNode = new DesiredNode(settings, 2.0f, ByteSizeValue.ofGb(1), ByteSizeValue.ofGb(1)); - assertThat(desiredNode.clusterHasRequiredFeatures(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED::equals), is(true)); - assertThat(desiredNode.clusterHasRequiredFeatures(nf -> false), is(true)); - } - } - public void testEqualsOrProcessorsCloseTo() { final Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), randomAlphaOfLength(10)).build(); final double maxDelta = 1E-3; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 99470918ce063..30895767c33c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -47,6 +48,7 @@ import java.time.LocalDate; import java.time.ZoneOffset; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,6 +60,8 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFailureStore; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.dateFromMillis; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.formatDate; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; @@ -885,10 +889,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[] {})), - equalTo(newHashSet("kuku", "testXXX")) - ); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context)), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesNoIndicesErrorMessage() { @@ -1408,52 +1409,56 @@ public void testConcreteIndicesWildcardNoMatch() { } } - public void testIsAllIndicesNull() throws Exception { + public void testIsAllIndicesNull() { assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true)); } - public void testIsAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Collections.emptyList()), equalTo(true)); + public void testIsAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true)); + } + + public void testIsAllIndicesExplicitAll() { + 
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true)); } - public void testIsAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all")), equalTo(true)); + public void testIsAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false)); } - public void testIsAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsNoneIndices() { + assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true)); } - public void testIsAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("*")), equalTo(false)); + public void testIsAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false)); } - public void testIsExplicitAllIndicesNull() throws Exception { + public void testIsExplicitAllIndicesNull() { assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false)); } - public void testIsExplicitAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Collections.emptyList()), equalTo(false)); + public void testIsExplicitAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false)); } - public void testIsExplicitAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all")), equalTo(true)); + public void testIsExplicitAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true)); } - public void testIsExplicitAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsExplicitAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false)); } - public void testIsExplicitAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsExplicitAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsExplicitAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("*")), equalTo(false)); + public void testIsExplicitAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false)); } public void testIndexOptionsFailClosedIndicesAndAliases() { @@ -1580,16 +1585,13 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - 
assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*"));
+        assertEquals(Set.of("alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "alias-*"));
+        assertEquals(Set.of("test-0", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*"));
         assertEquals(
-            new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")),
-            indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*")
-        );
-        assertEquals(
-            new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")),
+            Set.of("test-0", "test-1", "alias-0", "alias-1"),
             indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*")
         );
-        assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1"));
+        assertEquals(Set.of("test-1", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "*-1"));
     }
 
     public void testFilteringAliases() {
@@ -1598,16 +1600,16 @@ public void testFilteringAliases() {
             .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1")));
         ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
 
-        Set<String> resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1"));
+        Set<String> resolvedExpressions = Set.of("alias-0", "alias-1");
         String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions);
         assertArrayEquals(new String[] { "alias-0" }, strings);
 
         // concrete index supersedes filtering alias
-        resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1"));
+        resolvedExpressions = Set.of("test-0", "alias-0", "alias-1");
         strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions);
         assertNull(strings);
 
-        resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1"));
+        resolvedExpressions = Set.of("test-0", "test-1", "alias-0", "alias-1");
         strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions);
         assertNull(strings);
     }
@@ -1742,7 +1744,7 @@ public void testIndexAliasesSkipIdentity() {
         );
         ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
 
-        Set<String> resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias"));
+        Set<String> resolvedExpressions = Set.of("test-0", "test-alias");
         String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions);
         assertNull(aliases);
         aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions);
@@ -1769,7 +1771,7 @@ public void testConcreteWriteIndexSuccessful() {
             x -> true,
             x -> true,
             true,
-            new HashSet<>(Arrays.asList("test-0", "test-alias"))
+            Set.of("test-0", "test-alias")
         );
         Arrays.sort(strings);
         assertArrayEquals(new String[] { "test-alias" }, strings);
@@ -1851,7 +1853,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() {
             x -> true,
             x -> true,
             true,
-            new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))
+            Set.of("test-0", "test-1", "test-alias")
         );
         Arrays.sort(strings);
         assertArrayEquals(new String[] { "test-alias" }, strings);
@@ -1889,7 +1891,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() {
             x -> true,
             x -> true,
             true,
-            new HashSet<>(Arrays.asList("test-0", "test-alias"))
+            Set.of("test-0", "test-alias")
         );
         Arrays.sort(strings);
         assertArrayEquals(new String[] { "test-alias" }, strings);
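// Editor's illustrative aside, not part of the patch: the hunks above and below swap
// new HashSet<>(Arrays.asList(...)) for Set.of(...). The swap is behavior-preserving in
// these assertions and method arguments because java.util.Set equality is defined purely
// by contents; Set.of additionally yields an immutable set and rejects nulls and
// duplicate elements. A minimal standalone sketch (the class name SetOfEquivalenceDemo
// is hypothetical):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class SetOfEquivalenceDemo {
    public static void main(String[] args) {
        Set<String> mutable = new HashSet<>(Arrays.asList("test-0", "test-alias"));
        Set<String> immutable = Set.of("test-0", "test-alias");
        // Content-based equality holds across Set implementations:
        System.out.println(mutable.equals(immutable)); // prints: true
        // Set.of is immutable; mutation attempts throw:
        try {
            immutable.add("other");
        } catch (UnsupportedOperationException e) {
            System.out.println("immutable as expected");
        }
    }
}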
@@ -1925,7 +1927,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() {
             x -> true,
             x -> true,
             true,
-            new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))
+            Set.of("test-0", "test-1", "test-alias")
         );
         Arrays.sort(strings);
         assertArrayEquals(new String[] { "test-alias" }, strings);
@@ -1966,7 +1968,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() {
             x -> true,
             x -> true,
             true,
-            new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias"))
+            Set.of("test-0", "test-1", "test-alias")
         );
         Arrays.sort(strings);
         assertArrayEquals(new String[] { "test-alias" }, strings);
@@ -2328,40 +2330,40 @@ public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() {
         SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
         request.indicesOptions(IndicesOptions.strictExpandHidden());
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
     }
 
     public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".w*");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".watches"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder(".watches"));
     }
 
     public void testWildcardSystemIndexResolutionSingleMatchAllowed() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".ml-*");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff"));
     }
 
     public void testSingleSystemIndexResolutionAllowed() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".ml-meta");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".ml-meta"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta"));
     }
 
     public void testFullWildcardSystemIndicesAreHidden() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder("some-other-index"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContaining("some-other-index"));
     }
 
     public void testFullWildcardSystemIndexResolutionDeprecated() {
@@ -2370,8 +2372,8 @@ public void testFullWildcardSystemIndexResolutionDeprecated() {
         SearchRequest request = new SearchRequest(randomFrom("*", "_all"));
         request.indicesOptions(IndicesOptions.strictExpandHidden());
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches"));
         assertWarnings(
             true,
             new DeprecationWarning(
@@ -2388,8 +2390,8 @@ public void testSingleSystemIndexResolutionDeprecated() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".ml-meta");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".ml-meta"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContaining(".ml-meta"));
         assertWarnings(
             true,
             new DeprecationWarning(
@@ -2405,8 +2407,8 @@ public void testWildcardSystemIndexResolutionSingleMatchDeprecated() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".w*");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".watches"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder(".watches"));
         assertWarnings(
             true,
             new DeprecationWarning(
@@ -2423,8 +2425,8 @@ public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() {
         ClusterState state = systemIndexTestClusterState();
         SearchRequest request = new SearchRequest(".ml-*");
 
-        List<String> indexNames = resolveConcreteIndexNameList(state, request);
-        assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff"));
+        String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+        assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff"));
         assertWarnings(
             true,
             new DeprecationWarning(
@@ -2479,8 +2481,8 @@ public void testExternalSystemIndexAccess() {
             threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
             SearchRequest request = new SearchRequest(".external-*");
 
-            List<String> indexNames = resolveConcreteIndexNameList(state, request);
-            assertThat(indexNames, contains(".external-sys-idx"));
+            String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+            assertThat(indexNames, arrayContaining(".external-sys-idx"));
             assertWarnings(
                 true,
                 new DeprecationWarning(
@@ -2496,8 +2498,8 @@ public void testExternalSystemIndexAccess() {
             threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString());
             SearchRequest request = new SearchRequest(".external-sys-idx");
 
-            List<String> indexNames = resolveConcreteIndexNameList(state, request);
-            assertThat(indexNames, contains(".external-sys-idx"));
+            String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+            assertThat(indexNames, arrayContaining(".external-sys-idx"));
             assertWarnings(
                 true,
                 new DeprecationWarning(
@@ -2515,8 +2517,8 @@ public void testExternalSystemIndexAccess() {
                 threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component");
                 SearchRequest request = new SearchRequest(".external-*");
 
-                List<String> indexNames = resolveConcreteIndexNameList(state, request);
-                assertThat(indexNames, contains(".external-sys-idx"));
+                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                 assertWarnings();
             }
         }
@@ -2526,8 +2528,8 @@ public void testExternalSystemIndexAccess() {
                 threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component");
                 SearchRequest request = new SearchRequest(".external-sys-idx");
 
-                List<String> indexNames = resolveConcreteIndexNameList(state, request);
-                assertThat(indexNames, contains(".external-sys-idx"));
+                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                 assertWarnings();
             }
         }
@@ -2538,8 +2540,8 @@ public void testExternalSystemIndexAccess() {
                 threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other");
                 SearchRequest request = new SearchRequest(".external-*");
 
-                List<String> indexNames = resolveConcreteIndexNameList(state, request);
-                assertThat(indexNames, contains(".external-sys-idx"));
+                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                 assertWarnings();
             }
         }
@@ -2549,8 +2551,8 @@ public void testExternalSystemIndexAccess() {
                 threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other");
                 SearchRequest request = new SearchRequest(".external-sys-idx");
 
-                List<String> indexNames = resolveConcreteIndexNameList(state, request);
-                assertThat(indexNames, contains(".external-sys-idx"));
+                String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
+                assertThat(indexNames, arrayContaining(".external-sys-idx"));
                 assertWarnings();
             }
         }
@@ -3073,7 +3075,6 @@ public void testDataStreamsWithWildcardExpression() {
             assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis)));
             assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis)));
             assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis)));
-            ;
         }
         {
             IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN;
@@ -3239,6 +3240,37 @@ public void testDataStreamsNames() {
         assertThat(names, empty());
     }
 
+    public void testDateMathMixedArray() {
+        long now = System.currentTimeMillis();
+        String dateMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now));
+        String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1));
+        IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context(
+            ClusterState.builder(new ClusterName("_name"))
+                .metadata(
+                    Metadata.builder()
+                        .put(indexBuilder("name1"))
+                        .put(indexBuilder("name2"))
+                        .put(indexBuilder(dateMathIndex1))
+                        .put(indexBuilder(dateMathIndex2))
+                )
+                .build(),
+            IndicesOptions.strictExpand(),
+            now,
+            SystemIndexAccessLevel.NONE,
+            Predicates.never(),
+            Predicates.never()
+        );
+        Collection<String> result = IndexNameExpressionResolver.resolveExpressionsToResources(
+            context,
+            "name1",
+            "<.marvel-{now/d}>",
+            "name2",
+            "<.logstash-{now/M{uuuu.MM}}>"
+        );
+        assertThat(result.size(), equalTo(4));
+        assertThat(result, contains("name1", dateMathIndex1, "name2", dateMathIndex2));
+    }
+
     public void testMathExpressionSupport() {
         Instant instant = LocalDate.of(2021, 01, 11).atStartOfDay().toInstant(ZoneOffset.UTC);
         String resolved = IndexNameExpressionResolver.resolveDateMathExpression("", instant.toEpochMilli());
@@ -3418,10 +3450,6 @@ private ClusterState systemIndexTestClusterState() {
         return ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build();
     }
 
-    private List<String> resolveConcreteIndexNameList(ClusterState state, SearchRequest request) {
-        return 
Arrays.stream(indexNameExpressionResolver.concreteIndices(state, request)).map(Index::getName).toList(); - } - private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) { return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..6a26e7948784c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,23 +13,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class WildcardExpressionResolverTests extends ESTestCase { @@ -50,107 +47,31 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*")), + equalTo(newHashSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") - ) - ), - equalTo(newHashSet("testXYY")) - ); - if (indicesOptions == IndicesOptions.lenientExpandOpen()) { - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - equalTo(newHashSet("testXXX", "-testXXX")) - ); - } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(context, "testXXX", "-testXXX") - ); - assertEquals("-testXXX", infe.getIndex().getName()); - } - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) - ); - } - - public void testConvertWildcardsTests() { - Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").putAlias(AliasMetadata.builder("alias1")).putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testXYY").putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testYYY").putAlias(AliasMetadata.builder("alias3"))) - .put(indexBuilder("kuku")); - ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - - IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.lenientExpandOpen(), - SystemIndexAccessLevel.NONE - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); } public void testConvertWildcardsOpenClosedIndicesTests() { Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXXY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXYY").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("testYYY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testYYX").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("kuku").state(IndexMetadata.State.OPEN)); + .put(indexBuilder("testXXX").state(State.OPEN)) + .put(indexBuilder("testXXY").state(State.OPEN)) + .put(indexBuilder("testXYY").state(State.CLOSE)) 
+ .put(indexBuilder("testYYY").state(State.OPEN)) + .put(indexBuilder("testYYX").state(State.CLOSE)) + .put(indexBuilder("kuku").state(State.OPEN)); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -159,7 +80,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -168,7 +89,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -177,26 +98,9 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY")) ); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(true, true, false, false), - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(context, "testX*").size(), equalTo(0)); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(false, true, false, false), - SystemIndexAccessLevel.NONE - ); - IndexNameExpressionResolver.Context finalContext = context; - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(finalContext, "testX*") - ); - assertThat(infe.getIndex().getName(), is("testX*")); } // issue #13334 @@ -217,28 +121,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y")), equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*")), equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*")), equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X")).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X")).size(), equalTo(0) ); } @@ -259,26 +162,6 @@ public void testAll() { newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - IndicesOptions noExpandOptions = IndicesOptions.fromOptions( - randomBoolean(), - true, - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context( - state, - noExpandOptions, - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } public void testAllAliases() { @@ -506,112 +389,47 @@ public void testResolveAliases() { ); { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertEquals(0, indices.size()); } { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesStrictContext, - Collections.singletonList("foo_a*") - ) + Set<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + skipAliasesStrictContext, + "foo_a*" ); - assertEquals("foo_a*", infe.getIndex().getName()); + assertThat(indices, empty()); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices =
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesLenientContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(skipAliasesStrictContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } - IndicesOptions noExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions(true, false, false, false, true, false, true, false); - IndexNameExpressionResolver.Context noExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - noExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - noExpandNoAliasesContext, - List.of("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( - false, - true, - false, - false, - true, - false, - true, - false - ); - IndexNameExpressionResolver.Context strictNoExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - strictNoExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(strictNoExpandNoAliasesContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } } public void testResolveDataStreams() { @@ -654,17 +472,14 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data stream - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("bar_*") - ); + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "bar_*"); assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } @@ -691,9 +506,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, -
Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -707,9 +522,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -748,9 +563,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection<String> indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -764,9 +579,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -808,24 +623,17 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection<String> matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + Collection<String> matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + indicesAndAliasesContext, + "*" + ); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*"); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") - ); - assertThat( - iae.getMessage(), - containsString("The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead") - ); } private static IndexMetadata.Builder indexBuilder(String index, boolean hidden) { @@ -838,10 +646,6 @@ private static IndexMetadata.Builder indexBuilder(String index) { } private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { - IndexNotFoundException infe =
expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) - ); - assertEquals(wildcardExpression, infe.getIndex().getName()); + assertThat(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, wildcardExpression), empty()); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 21b30557cafea..e5786b1b3449e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,7 @@ import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO_2; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -27,16 +29,22 @@ public class IndexRoutingTableTests extends ESTestCase { public void testReadyForSearch() { - innerReadyForSearch(false); - innerReadyForSearch(true); + innerReadyForSearch(false, false); + innerReadyForSearch(false, true); + innerReadyForSearch(true, false); + innerReadyForSearch(true, true); } - private void innerReadyForSearch(boolean fastRefresh) { + // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563) + private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) { Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID()); ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS); when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn( Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build() ); + when(clusterState.getMinTransportVersion()).thenReturn( + beforeFastRefreshRCO ? 
TransportVersion.fromId(FAST_REFRESH_RCO_2.id() - 1_00_0) : TransportVersion.current() + ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -55,7 +63,7 @@ private void innerReadyForSearch(boolean fastRefresh) { shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -91,7 +99,7 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -118,8 +126,6 @@ private void innerReadyForSearch(boolean fastRefresh) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available - // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure - // that readyForSearch allows for searching replicas when the index shard is not available. shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -137,8 +143,8 @@ private void innerReadyForSearch(boolean fastRefresh) { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change + if (fastRefresh && beforeFastRefreshRCO) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java index b18e2c0cd2647..9d33b697e31ca 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceShardsAllocatorTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -57,6 +58,7 @@ import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.TestThreadPool; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Queue; @@ -79,7 +81,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; 
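The IndexRoutingTableTests change above gates the expected readyForSearch behaviour on the cluster's minimum transport version, stepping back from the FAST_REFRESH_RCO_2 id to synthesize a pre-feature version. A minimal, self-contained sketch of that version-gating pattern follows; FEATURE_ID is a hypothetical stand-in for the real constant, and the arithmetic mirrors the test's "- 1_00_0":

    // Sketch of gating expected behaviour on a minimum wire-protocol version,
    // as the test does with TransportVersion. FEATURE_ID is made up here.
    class VersionGateSketch {
        static final int FEATURE_ID = 8_772_00_0; // hypothetical, not the real id

        // Before the feature version, fast-refresh shards count as searchable on
        // index-only primaries; from the feature version onward they do not.
        static boolean readyForSearchOnIndexOnlyPrimary(boolean fastRefresh, int minTransportVersionId) {
            return fastRefresh && minTransportVersionId < FEATURE_ID;
        }

        public static void main(String[] args) {
            int beforeFeature = FEATURE_ID - 1_00_0; // one version step earlier
            System.out.println(readyForSearchOnIndexOnlyPrimary(true, beforeFeature)); // true
            System.out.println(readyForSearchOnIndexOnlyPrimary(true, FEATURE_ID));    // false
        }
    }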
+import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class DesiredBalanceShardsAllocatorTests extends ESAllocationTestCase { @@ -916,6 +920,77 @@ public void resetDesiredBalance() { } } + public void testNotReconcileEagerlyForEmptyRoutingTable() { + final var threadPool = new TestThreadPool(getTestName()); + final var clusterService = ClusterServiceUtils.createClusterService(ClusterState.EMPTY_STATE, threadPool); + final var clusterSettings = createBuiltInClusterSettings(); + final var shardsAllocator = createShardsAllocator(); + final var reconciliationTaskSubmitted = new AtomicBoolean(); + final var desiredBalanceShardsAllocator = new DesiredBalanceShardsAllocator( + shardsAllocator, + threadPool, + clusterService, + new DesiredBalanceComputer(clusterSettings, TimeProviderUtils.create(() -> 1L), shardsAllocator) { + @Override + public DesiredBalance compute( + DesiredBalance previousDesiredBalance, + DesiredBalanceInput desiredBalanceInput, + Queue<List<MoveAllocationCommand>> pendingDesiredBalanceMoves, + Predicate<DesiredBalanceInput> isFresh + ) { + assertThat(previousDesiredBalance, sameInstance(DesiredBalance.INITIAL)); + return new DesiredBalance(desiredBalanceInput.index(), Map.of()); + } + }, + (clusterState, rerouteStrategy) -> null, + TelemetryProvider.NOOP, + EMPTY_NODE_ALLOCATION_STATS + ) { + + private ActionListener<Void> lastListener; + + @Override + public void allocate(RoutingAllocation allocation, ActionListener<Void> listener) { + lastListener = listener; + super.allocate(allocation, listener); + } + + @Override + protected void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { + fail("should not call reconcile"); + } + + @Override + protected void submitReconcileTask(DesiredBalance desiredBalance) { + assertThat(desiredBalance.lastConvergedIndex(), equalTo(0L)); + reconciliationTaskSubmitted.set(true); + lastListener.onResponse(null); + } + }; + assertThat(desiredBalanceShardsAllocator.getDesiredBalance(), sameInstance(DesiredBalance.INITIAL)); + try { + final PlainActionFuture<Void> future = new PlainActionFuture<>(); + desiredBalanceShardsAllocator.allocate( + new RoutingAllocation( + new AllocationDeciders(Collections.emptyList()), + clusterService.state(), + null, + null, + randomNonNegativeLong() + ), + future + ); + safeGet(future); + assertThat(desiredBalanceShardsAllocator.getStats().computationSubmitted(), equalTo(1L)); + assertThat(desiredBalanceShardsAllocator.getStats().computationExecuted(), equalTo(1L)); + assertThat(reconciliationTaskSubmitted.get(), is(true)); + assertThat(desiredBalanceShardsAllocator.getDesiredBalance().lastConvergedIndex(), equalTo(0L)); + } finally { + clusterService.close(); + terminate(threadPool); + } + } + private static IndexMetadata createIndex(String name) { return IndexMetadata.builder(name).settings(indexSettings(IndexVersion.current(), 1, 0)).build(); } diff --git a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java index 19d92568e6528..fa774c0bcfd12 100644 --- a/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java +++ b/server/src/test/java/org/elasticsearch/http/AbstractHttpServerTransportTests.java @@ -271,7 +271,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th final RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final
RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( fakeRequest, - true, + randomBoolean(), RestStatus.BAD_REQUEST ); @@ -361,7 +361,7 @@ public void dispatchBadRequest(final RestChannel channel, final ThreadContext th Map<String, List<String>> restHeaders = new HashMap<>(); restHeaders.put(Task.TRACE_PARENT_HTTP_HEADER, Collections.singletonList(traceParentValue)); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + RestControllerTests.AssertingChannel channel = new RestControllerTests.AssertingChannel( + fakeRequest, + randomBoolean(), + RestStatus.BAD_REQUEST + ); try ( AbstractHttpServerTransport transport = new AbstractHttpServerTransport( diff --git a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index b4130120372a1..8da7ada91856d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -37,6 +37,31 @@ public void testMemoryLimitSettingsFallbackToOldSingleLimitSetting() { assertThat(IndexingPressure.MAX_REPLICA_BYTES.get(settings), Matchers.equalTo(ByteSizeValue.ofKb(30))); } + public void testHighAndLowWatermarkSplits() { + IndexingPressure indexingPressure = new IndexingPressure(settings); + + try ( + Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); + Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) + ) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + + try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + } + } + } + public void testHighAndLowWatermarkSettings() { IndexingPressure indexingPressure = new IndexingPressure(settings); diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 77ab665166926..997cb123dbf8e 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -48,7 +48,6 @@ import java.util.concurrent.atomic.AtomicLong; import static org.elasticsearch.cluster.node.DiscoveryNode.STATELESS_ENABLED_SETTING_NAME; -import static
org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.elasticsearch.index.cache.bitset.BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -253,35 +252,21 @@ public void testShouldLoadRandomAccessFiltersEagerly() { for (var hasIndexRole : values) { for (var loadFiltersEagerly : values) { for (var isStateless : values) { - for (var fastRefresh : values) { - if (isStateless == false && fastRefresh) { - // fast refresh is only relevant for stateless indices - continue; - } - - boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( - bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, fastRefresh) - ); - if (isStateless) { - assertEquals(loadFiltersEagerly && ((hasIndexRole && fastRefresh) || hasIndexRole == false), result); - } else { - assertEquals(loadFiltersEagerly, result); - } + boolean result = BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( + bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly) + ); + if (isStateless) { + assertEquals(loadFiltersEagerly && hasIndexRole == false, result); + } else { + assertEquals(loadFiltersEagerly, result); } } } } } - private IndexSettings bitsetFilterCacheSettings( - boolean isStateless, - boolean hasIndexRole, - boolean loadFiltersEagerly, - boolean fastRefresh - ) { - var indexSettingsBuilder = Settings.builder() - .put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly) - .put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh); + private IndexSettings bitsetFilterCacheSettings(boolean isStateless, boolean hasIndexRole, boolean loadFiltersEagerly) { + var indexSettingsBuilder = Settings.builder().put(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), loadFiltersEagerly); var nodeSettingsBuilder = Settings.builder() .putList( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 399740e6200e6..d4d0e67ff4141 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -69,7 +69,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new PassThroughObjectMapper.Builder("labels").setPriority(0).setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index df6d9380fd141..d7f33b9cdb3ba 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -247,14 +247,14 @@ public void 
testSyntheticSourceInTimeSeries() throws IOException { }); DocumentMapper mapper = createTimeSeriesModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSyntheticSourceWithLogsIndexMode() throws IOException { XContentBuilder mapping = fieldMapping(b -> { b.field("type", "keyword"); }); DocumentMapper mapper = createLogsModeDocumentMapper(mapping); assertTrue(mapper.sourceMapper().isSynthetic()); - assertEquals("{\"_source\":{\"mode\":\"synthetic\"}}", mapper.sourceMapper().toString()); + assertEquals("{\"_source\":{}}", mapper.sourceMapper().toString()); } public void testSupportsNonDefaultParameterValues() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index fdc18264e2299..dc70c44a89128 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -384,7 +384,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Optional.empty()).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); diff --git a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index dabc8672733e2..0e8c7e0857251 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -65,11 +65,12 @@ public void testReadStringProperty() { } public void testReadStringPropertyInvalidType() { - try { - ConfigurationUtils.readStringProperty(null, null, config, "arr"); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringProperty(null, null, config, "arr") + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string, but of type [java.util.Arrays$ArrayList]")); + } public void testReadBooleanProperty() { @@ -83,11 +84,11 @@ public void testReadNullBooleanProperty() { } public void testReadBooleanPropertyInvalidType() { - try { - ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a 
boolean, but of type [java.util.Arrays$ArrayList]")); } public void testReadStringOrIntProperty() { @@ -98,11 +99,11 @@ public void testReadStringOrIntProperty() { } public void testReadStringOrIntPropertyInvalidType() { - try { - ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); - } + ElasticsearchParseException caught = assertThrows( + ElasticsearchParseException.class, + () -> ConfigurationUtils.readStringOrIntProperty(null, null, config, "arr", null) + ); + assertThat(caught.getMessage(), equalTo("[arr] property isn't a string or int, but of type [java.util.Arrays$ArrayList]")); } public void testReadMediaProperty() { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index b62fff2eceb28..8235c66ef976b 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -56,8 +56,8 @@ public void testFromXContent() throws IOException { assertEquals(2, custom.getPipelines().size()); assertEquals("1", custom.getPipelines().get("1").getId()); assertEquals("2", custom.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), custom.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), custom.getPipelines().get("2").getConfigAsMap()); + assertEquals(pipeline.getConfig(), custom.getPipelines().get("1").getConfig()); + assertEquals(pipeline2.getConfig(), custom.getPipelines().get("2").getConfig()); } } diff --git a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java index 202c4edb2d0c8..7be6e97762ccf 100644 --- a/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/PipelineConfigurationTests.java @@ -26,26 +26,57 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; import java.util.function.Predicate; +import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; public class PipelineConfigurationTests extends AbstractXContentTestCase<PipelineConfiguration> { + public void testConfigInvariants() { + Map<String, Object> original = Map.of("a", 1); + Map<String, Object> mutable = new HashMap<>(original); + PipelineConfiguration configuration = new PipelineConfiguration("1", mutable); + // the config is equal to the original & mutable map, regardless of how you get a reference to it + assertThat(configuration.getConfig(), equalTo(original)); + assertThat(configuration.getConfig(), equalTo(mutable)); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(true))); + // the config is the same instance as itself when unmodifiable is true + assertThat(configuration.getConfig(), sameInstance(configuration.getConfig())); + assertThat(configuration.getConfig(), sameInstance(configuration.getConfig(true))); + // but it's not the same instance as the original mutable map, nor if unmodifiable is false + assertThat(configuration.getConfig(),
not(sameInstance(mutable))); + assertThat(configuration.getConfig(), not(sameInstance(configuration.getConfig(false)))); + + // changing the mutable map doesn't alter the pipeline's configuration + mutable.put("b", 2); + assertThat(configuration.getConfig(), equalTo(original)); + + // the modifiable map can be modified + Map<String, Object> modifiable = configuration.getConfig(false); + modifiable.put("c", 3); // this doesn't throw an exception + assertThat(modifiable.get("c"), equalTo(3)); + // but the next modifiable copy is a new fresh copy, and doesn't reflect those changes + assertThat(configuration.getConfig(), equalTo(configuration.getConfig(false))); + } + public void testSerialization() throws IOException { PipelineConfiguration configuration = new PipelineConfiguration( "1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); - + assertThat(configuration.getConfig(), anEmptyMap()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals("{}", serialized.getConfig().utf8ToString()); + assertThat(serialized.getConfig(), anEmptyMap()); } public void testMetaSerialization() throws IOException { @@ -56,13 +87,14 @@ public void testMetaSerialization() throws IOException { new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), XContentType.JSON ); - assertEquals(XContentType.JSON, configuration.getXContentType()); BytesStreamOutput out = new BytesStreamOutput(); configuration.writeTo(out); StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); PipelineConfiguration serialized = PipelineConfiguration.readFrom(in); - assertEquals(XContentType.JSON, serialized.getXContentType()); - assertEquals(configJson, serialized.getConfig().utf8ToString()); + assertEquals( + XContentHelper.convertToMap(new BytesArray(configJson.getBytes(StandardCharsets.UTF_8)), true, XContentType.JSON).v2(), + serialized.getConfig() + ); } public void testParser() throws IOException { @@ -80,9 +112,8 @@ public void testParser() throws IOException { XContentParser xContentParser = xContentType.xContent() .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, bytes.streamInput()); PipelineConfiguration parsed = parser.parse(xContentParser, null); - assertEquals(xContentType.canonical(), parsed.getXContentType()); - assertEquals("{}", XContentHelper.convertToJson(parsed.getConfig(), false, parsed.getXContentType())); - assertEquals("1", parsed.getId()); + assertThat(parsed.getId(), equalTo("1")); + assertThat(parsed.getConfig(), anEmptyMap()); } public void testGetVersion() { diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java new file mode 100644 index 0000000000000..059cb15551acb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class PluginsLoaderTests extends ESTestCase { + + public void testToModuleName() { + assertThat(PluginsLoader.toModuleName("module.name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name1"), equalTo("module.name1")); + assertThat(PluginsLoader.toModuleName("1module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module!@#name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("!module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module_name"), equalTo("module_name")); + assertThat(PluginsLoader.toModuleName("-module-name-"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("_module_name"), equalTo("_module_name")); + assertThat(PluginsLoader.toModuleName("_"), equalTo("_")); + } +} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index f927a12b50da3..b84f1d2c7635c 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -66,12 +66,12 @@ public class PluginsServiceTests extends ESTestCase { public static class FilterablePlugin extends Plugin implements ScriptPlugin {} static PluginsService newPluginsService(Settings settings) { - return new PluginsService(settings, null, null, TestEnvironment.newEnvironment(settings).pluginsFile()) { + return new PluginsService(settings, null, new PluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile()) { @Override protected void addServerExportsService(Map<String, List<ModuleQualifiedExportsService>> qualifiedExports) { // tests don't run modular } - }; + }); } static PluginsService newMockPluginsService(List<Class<? extends Plugin>> classpathPlugins) { @@ -875,20 +875,6 @@ public void testCanCreateAClassLoader() { assertEquals(this.getClass().getClassLoader(), loader.getParent()); } - public void testToModuleName() { - assertThat(PluginsService.toModuleName("module.name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name1"), equalTo("module.name1")); - assertThat(PluginsService.toModuleName("1module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module!@#name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("!module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module_name"), equalTo("module_name")); - assertThat(PluginsService.toModuleName("-module-name-"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("_module_name"), equalTo("_module_name")); - assertThat(PluginsService.toModuleName("_"), equalTo("_")); - } - static final class Loader
extends ClassLoader { Loader(ClassLoader parent) { super(parent); @@ -896,16 +882,17 @@ static final class Loader extends ClassLoader { } // Closes the URLClassLoaders and UberModuleClassloaders of plugins loaded by the given plugin service. + // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. static void closePluginLoaders(PluginsService pluginService) { for (var lp : pluginService.plugins()) { - if (lp.loader() instanceof URLClassLoader urlClassLoader) { + if (lp.instance().getClass().getClassLoader() instanceof URLClassLoader urlClassLoader) { try { PrivilegedOperations.closeURLClassLoader(urlClassLoader); } catch (IOException unexpected) { throw new UncheckedIOException(unexpected); } } - if (lp.loader() instanceof UberModuleClassLoader loader) { + if (lp.instance().getClass().getClassLoader() instanceof UberModuleClassLoader loader) { try { PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); } catch (Exception e) { diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 8a8bed9ca73db..9f82911ed121f 100644 --- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -73,7 +73,7 @@ public List<Route> routes() { params.put("consumed", randomAlphaOfLength(8)); params.put("unconsumed", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -108,7 +108,7 @@ public List<Route> routes() { params.put("unconsumed-first", randomAlphaOfLength(8)); params.put("unconsumed-second", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -155,7 +155,7 @@ public List<Route> routes() { params.put("very_close_to_parametre", randomAlphaOfLength(8)); params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) @@ -206,7 +206,7 @@ public List<Route> routes() { params.put("consumed", randomAlphaOfLength(8)); params.put("response_param", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -238,7 +238,7 @@ public List<Route> routes() { params.put("human", null);
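The BaseRestHandlerTests hunks in this stretch assert that any request parameter a handler never reads produces an IllegalArgumentException. A rough sketch of that consumed-parameter bookkeeping, with hypothetical names (this is not the BaseRestHandler implementation):

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class UnconsumedParamsSketch {
        private final Map<String, String> params;
        private final Set<String> consumed = new HashSet<>();

        UnconsumedParamsSketch(Map<String, String> params) {
            this.params = params;
        }

        String param(String key) {
            consumed.add(key); // reading a parameter marks it consumed
            return params.get(key);
        }

        void failOnUnconsumed() {
            Set<String> unconsumed = new HashSet<>(params.keySet());
            unconsumed.removeAll(consumed);
            if (unconsumed.isEmpty() == false) {
                throw new IllegalArgumentException("request contains unrecognized parameters: " + unconsumed);
            }
        }

        public static void main(String[] args) {
            UnconsumedParamsSketch request = new UnconsumedParamsSketch(Map.of("consumed", "x", "unconsumed", "y"));
            request.param("consumed");
            try {
                request.failOnUnconsumed();
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // reports [unconsumed]
            }
        }
    }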
params.put("error_trace", randomFrom("true", "false", null)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -283,7 +283,7 @@ public List routes() { params.put("size", randomAlphaOfLength(8)); params.put("time", randomAlphaOfLength(8)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build(); - RestChannel channel = new FakeRestChannel(request, true, 1); + RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -314,7 +314,7 @@ public List routes() { new BytesArray(builder.toString()), XContentType.JSON ).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -341,7 +341,7 @@ public List routes() { }; final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); handler.handleRequest(request, channel, mockClient); assertTrue(restChannelConsumer.executed); assertTrue(restChannelConsumer.closed); @@ -371,7 +371,7 @@ public List routes() { new BytesArray(builder.toString()), XContentType.JSON ).build(); - final RestChannel channel = new FakeRestChannel(request, true, 1); + final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> handler.handleRequest(request, channel, mockClient) diff --git a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java index 907c16aad5fdc..eece90ed94cf9 100644 --- a/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java +++ b/server/src/test/java/org/elasticsearch/rest/ChunkedRestResponseBodyPartTests.java @@ -56,7 +56,7 @@ public void testEncodesChunkedXContentCorrectly() throws IOException { ToXContent.EMPTY_PARAMS, new FakeRestChannel( new FakeRestRequest.Builder(xContentRegistry()).withContent(BytesArray.EMPTY, randomXContent.type()).build(), - true, + randomBoolean(), 1 ) ); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index afdad1045b4de..b7d38f6f299c7 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -161,7 +161,7 @@ public void testApplyProductSpecificResponseHeaders() { final ThreadContext threadContext = client.threadPool().getThreadContext(); final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); - AssertingChannel channel = new 
AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); // the rest controller relies on the caller to stash the context, so we should expect these values here as we didn't stash the // context in this test @@ -180,7 +180,7 @@ public void testRequestWithDisallowedMultiValuedHeader() { restHeaders.put("header.1", Collections.singletonList("boo")); restHeaders.put("header.2", List.of("foo", "bar")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); assertTrue(channel.getSendResponseCalled()); } @@ -211,7 +211,7 @@ public String getName() { }); } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy( eq(1L), @@ -235,7 +235,7 @@ public MethodHandlers next() { return null; } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -257,7 +257,7 @@ public MethodHandlers next() { } }); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -280,7 +280,7 @@ public String getName() { })); when(spyRestController.getAllHandlers(any(), eq(fakeRequest.rawPath()))).thenAnswer(x -> handlers.iterator()); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.METHOD_NOT_ALLOWED); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); spyRestController.dispatchRequest(fakeRequest, channel, threadContext); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 405))); } @@ -290,7 +290,7 @@ public void testDispatchBadRequestEmitsMetric() { final RestController restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).build(); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, threadContext, new Exception()); verify(requestsCounter).incrementBy(eq(1L), eq(Map.of(STATUS_CODE_KEY, 400))); } @@ -314,7 +314,7 @@ public MethodHandlers next() { return new MethodHandlers("/").addMethod(GET, RestApiVersion.current(), (request, channel, client) -> {}); } }); - AssertingChannel channel = new 
AssertingChannel(fakeRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(fakeRequest, channel, threadContext); verify(tracer).startTrace( eq(threadContext), @@ -340,7 +340,7 @@ public void testRequestWithDisallowedMultiValuedHeaderButSameValues() { new RestResponse(RestStatus.OK, RestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY) ) ); - AssertingChannel channel = new AssertingChannel(fakeRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRequest, channel, threadContext); assertTrue(channel.getSendResponseCalled()); } @@ -466,7 +466,7 @@ public void testRestInterceptor() throws Exception { ); restController.registerHandler(new Route(GET, "/wrapped"), handler); RestRequest request = testRestRequest("/wrapped", "{}", XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); httpServerTransport.start(); assertThat(wrapperCalled.get(), is(true)); @@ -477,7 +477,7 @@ public void testDispatchRequestAddsAndFreesBytesOnSuccess() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -489,7 +489,7 @@ public void testDispatchRequestAddsAndFreesBytesOnError() { int contentLength = BREAKER_LIMIT.bytesAsInt(); String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/error", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -502,7 +502,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnError() { String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/error", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true); + ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, randomBoolean()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -521,7 +521,7 @@ public void testDispatchRequestAddsAndFreesBytesOnlyOnceOnErrorDuringSend() { ); // we will produce an error in the rest handler and one more when sending the error response RestRequest request = testRestRequest("/foo", content, XContentType.JSON); - ExceptionThrowingChannel channel = new ExceptionThrowingChannel(request, true) { + ExceptionThrowingChannel channel = new 
ExceptionThrowingChannel(request, randomBoolean()) { @Override protected BytesStream newBytesOutput() { return new RecyclerBytesStreamOutput(recycler); @@ -538,7 +538,7 @@ public void testDispatchRequestLimitsBytes() { int contentLength = BREAKER_LIMIT.bytesAsInt() + 1; String content = randomAlphaOfLength((int) Math.round(contentLength / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, XContentType.JSON); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.TOO_MANY_REQUESTS); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.TOO_MANY_REQUESTS); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); @@ -549,7 +549,7 @@ public void testDispatchRequestLimitsBytes() { public void testDispatchRequiresContentTypeForRequestsWithContent() { String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); RestRequest request = testRestRequest("/", content, null); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController = new RestController(null, null, circuitBreakerService, usageService, telemetryProvider); restController.registerHandler( new Route(GET, "/"), @@ -566,7 +566,7 @@ public void testDispatchDoesNotRequireContentTypeForRequestsWithoutContent() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -582,7 +582,7 @@ public void testDispatchFailsWithPlainText() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler( new Route(GET, "/foo"), (request, channel1, client) -> channel1.sendResponse( @@ -603,7 +603,7 @@ public void testDispatchUnsupportedContentType() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); @@ -620,7 +620,7 @@ public void testDispatchWorksWithNewlineDelimitedJson() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -659,7 +659,7 @@ public void testDispatchWithContentStream() { if (randomBoolean()) { fakeRestRequest = new 
RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -683,7 +683,7 @@ public void testDispatchWithContentStreamNoContentType() { RestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(new BytesArray("{}"), null) .withPath("/foo") .build(); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } @@ -712,7 +712,7 @@ public void testNonStreamingXContentCausesErrorResponse() throws IOException { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -737,7 +737,7 @@ public void testUnknownContentWithContentStream() { if (randomBoolean()) { fakeRestRequest = new RestRequest(fakeRestRequest); } - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { @@ -756,7 +756,7 @@ public boolean supportsBulkContent() { public void testDispatchBadRequest() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest( channel, client.threadPool().getThreadContext(), @@ -789,7 +789,7 @@ public boolean canTripCircuitBreaker() { .withContent(BytesReference.bytes(content), content.contentType()) .build(); - final AssertingChannel channel = new AssertingChannel(restRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(restRequest, randomBoolean(), RestStatus.OK); assertFalse(channel.getSendResponseCalled()); assertFalse(restRequest.isContentConsumed()); @@ -801,7 +801,7 @@ public boolean canTripCircuitBreaker() { public void testDispatchBadRequestUnknownCause() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), null); 
assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("unknown cause")); @@ -813,14 +813,14 @@ public void testDispatchBadRequestWithValidationException() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); // it's always a 400 bad request when dispatching "regular" {@code ElasticsearchException} - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.BAD_REQUEST); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.BAD_REQUEST); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), exception); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().content().utf8ToString(), containsString("bad bad exception")); // but {@code HttpHeadersValidationException} do carry over the rest response code - channel = new AssertingChannel(fakeRestRequest, true, status); + channel = new AssertingChannel(fakeRestRequest, randomBoolean(), status); assertFalse(channel.getSendResponseCalled()); restController.dispatchBadRequest(channel, client.threadPool().getThreadContext(), new HttpHeadersValidationException(exception)); assertTrue(channel.getSendResponseCalled()); @@ -831,7 +831,7 @@ public void testFavicon() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(GET) .withPath("/favicon.ico") .build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().contentType(), containsString("image/x-icon")); @@ -841,7 +841,7 @@ public void testFaviconWithWrongHttpMethod() { final FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod( randomValueOtherThanMany(m -> m == GET || m == OPTIONS, () -> randomFrom(RestRequest.Method.values())) ).withPath("/favicon.ico").build(); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); restController.dispatchRequest(fakeRestRequest, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); assertThat(channel.getRestResponse().getHeaders().containsKey("Allow"), equalTo(true)); @@ -917,7 +917,7 @@ public Exception getInboundException() { } }, null); - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); assertFalse(channel.getSendResponseCalled()); restController.dispatchRequest(request, channel, client.threadPool().getThreadContext()); assertTrue(channel.getSendResponseCalled()); @@ -937,7 +937,7 @@ public Method method() { } }; - final AssertingChannel channel = new AssertingChannel(request, true, RestStatus.METHOD_NOT_ALLOWED); + final AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.METHOD_NOT_ALLOWED); restController.dispatchRequest(request, channel, 
client.threadPool().getThreadContext()); verify(tracer).startTrace(any(), any(RestRequest.class), anyString(), anyMap()); verify(tracer).addError(any(RestRequest.class), any(IllegalArgumentException.class)); @@ -951,7 +951,7 @@ public void testDispatchCompatibleHandler() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a compatible handler restController.registerHandler(GET, "/foo", RestApiVersion.minimumSupported(), (request, channel1, client) -> { @@ -975,7 +975,7 @@ public void testDispatchCompatibleRequestToNewlyAddedHandler() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a CURRENT newly added handler restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> { @@ -1018,7 +1018,7 @@ public void testCurrentVersionVNDMediaTypeIsNotUsingCompatibility() { final String mediaType = randomCompatibleMediaType(version); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // dispatch to a CURRENT newly added handler restController.registerHandler(new Route(GET, "/foo"), (request, channel1, client) -> { @@ -1041,7 +1041,7 @@ public void testCustomMediaTypeValidation() { final String mediaType = "application/x-protobuf"; FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.OK); // register handler that handles custom media type validation restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @@ -1068,7 +1068,7 @@ public void testBrowserSafelistedContentTypesAreRejected() { final String mediaType = randomFrom(RestController.SAFELISTED_MEDIA_TYPES); FakeRestRequest fakeRestRequest = requestWithContent(mediaType); - final AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.NOT_ACCEPTABLE); + final AssertingChannel channel = new AssertingChannel(fakeRestRequest, randomBoolean(), RestStatus.NOT_ACCEPTABLE); restController.registerHandler(new Route(GET, "/foo"), new RestHandler() { @Override @@ -1115,7 +1115,7 @@ public void testApiProtectionWithServerlessDisabled() { List<String> accessiblePaths = List.of("/public", "/internal", "/hidden"); accessiblePaths.forEach(path -> { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); }); } @@ -1137,12 +1137,12 @@ public void testApiProtectionWithServerlessEnabledAsEndUser() { final Consumer<List<String>> checkUnprotected = paths -> paths.forEach(path -> { 
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.OK); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.OK); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); }); final Consumer<List<String>> checkProtected = paths -> paths.forEach(path -> { RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath(path).build(); - AssertingChannel channel = new AssertingChannel(request, true, RestStatus.GONE); + AssertingChannel channel = new AssertingChannel(request, randomBoolean(), RestStatus.GONE); restController.dispatchRequest(request, channel, new ThreadContext(Settings.EMPTY)); RestResponse restResponse = channel.getRestResponse(); diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index 4345f3c5e3fb4..7fe2388ec5113 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -97,7 +97,7 @@ public void testUnsupportedMethodResponseHttpHeader() throws Exception { RestRequest restRequest = fakeRestRequestBuilder.build(); // Send the request and verify the response status code - FakeRestChannel restChannel = new FakeRestChannel(restRequest, true, 1); + FakeRestChannel restChannel = new FakeRestChannel(restRequest, randomBoolean(), 1); restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY)); assertThat(restChannel.capturedResponse().status().getStatus(), is(405)); diff --git a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java index cfed83f352951..b85ad31288c8c 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestResponseTests.java @@ -93,7 +93,6 @@ public void testWithHeaders() throws Exception { assertThat(response.getHeaders().get("n1"), contains("v11", "v12")); assertThat(response.getHeaders().get("n2"), notNullValue()); assertThat(response.getHeaders().get("n2"), contains("v21", "v22")); - assertChannelWarnings(channel); } public void testEmptyChunkedBody() { @@ -114,11 +113,11 @@ public void testSimpleExceptionMessage() throws Exception { Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); RestResponse response = new RestResponse(channel, t); String text = response.content().utf8ToString(); - assertThat(text, containsString("ElasticsearchException[an error occurred reading data]")); - assertThat(text, not(containsString("FileNotFoundException"))); + assertThat(text, containsString(""" + {"type":"exception","reason":"an error occurred reading data"}""")); + assertThat(text, not(containsString("file_not_found_exception"))); assertThat(text, not(containsString("/foo/bar"))); assertThat(text, not(containsString("error_trace"))); - assertChannelWarnings(channel); } public void testDetailedExceptionMessage() throws Exception { @@ -134,20 +133,6 @@ public void testDetailedExceptionMessage() throws Exception { {"type":"file_not_found_exception","reason":"/foo/bar"}""")); } - public void testNonElasticsearchExceptionIsNotShownAsSimpleMessage() throws Exception { - RestRequest request = new FakeRestRequest(); - 
RestChannel channel = new SimpleExceptionRestChannel(request); - - Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); - RestResponse response = new RestResponse(channel, t); - String text = response.content().utf8ToString(); - assertThat(text, not(containsString("UnknownException[an error occurred reading data]"))); - assertThat(text, not(containsString("FileNotFoundException[/foo/bar]"))); - assertThat(text, not(containsString("error_trace"))); - assertThat(text, containsString("\"error\":\"No ElasticsearchException found\"")); - assertChannelWarnings(channel); - } - public void testErrorTrace() throws Exception { RestRequest request = new FakeRestRequest(); request.params().put("error_trace", "true"); @@ -177,7 +162,6 @@ public void testAuthenticationFailedNoStackTrace() throws IOException { RestResponse response = new RestResponse(channel, authnException); assertThat(response.status(), is(RestStatus.UNAUTHORIZED)); assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); - assertChannelWarnings(channel); } } } @@ -202,7 +186,6 @@ public void testStackTrace() throws IOException { } else { assertThat(response.content().utf8ToString(), not(containsString(ElasticsearchException.STACK_TRACE))); } - assertChannelWarnings(channel); } } } @@ -232,9 +215,9 @@ public void testNullThrowable() throws Exception { RestResponse response = new RestResponse(channel, null); String text = response.content().utf8ToString(); - assertThat(text, containsString("\"error\":\"unknown\"")); + assertThat(text, containsString("\"type\":\"unknown\"")); + assertThat(text, containsString("\"reason\":\"unknown\"")); assertThat(text, not(containsString("error_trace"))); - assertChannelWarnings(channel); } public void testConvert() throws IOException { @@ -324,32 +307,26 @@ public void testErrorToAndFromXContent() throws IOException { original = new ElasticsearchException("ElasticsearchException without cause"); if (detailed) { addHeadersOrMetadata = randomBoolean(); - reason = "ElasticsearchException without cause"; - } else { - reason = "ElasticsearchException[ElasticsearchException without cause]"; } + reason = "ElasticsearchException without cause"; } case 1 -> { original = new ElasticsearchException("ElasticsearchException with a cause", new FileNotFoundException("missing")); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "exception"; - reason = "ElasticsearchException with a cause"; cause = new ElasticsearchException("Elasticsearch exception [type=file_not_found_exception, reason=missing]"); - } else { - reason = "ElasticsearchException[ElasticsearchException with a cause]"; } + type = "exception"; + reason = "ElasticsearchException with a cause"; } case 2 -> { original = new ResourceNotFoundException("ElasticsearchException with custom status"); status = RestStatus.NOT_FOUND; if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "resource_not_found_exception"; - reason = "ElasticsearchException with custom status"; - } else { - reason = "ResourceNotFoundException[ElasticsearchException with custom status]"; } + type = "resource_not_found_exception"; + reason = "ElasticsearchException with custom status"; } case 3 -> { TransportAddress address = buildNewFakeTransportAddress(); @@ -360,12 +337,8 @@ public void testErrorToAndFromXContent() throws IOException { new ResourceAlreadyExistsException("ElasticsearchWrapperException with a cause that has a custom status") ); status = 
RestStatus.BAD_REQUEST; - if (detailed) { - type = "resource_already_exists_exception"; - reason = "ElasticsearchWrapperException with a cause that has a custom status"; - } else { - reason = "RemoteTransportException[[remote][" + address.toString() + "][action]]"; - } + type = "resource_already_exists_exception"; + reason = "ElasticsearchWrapperException with a cause that has a custom status"; } case 4 -> { original = new RemoteTransportException( @@ -373,23 +346,17 @@ public void testErrorToAndFromXContent() throws IOException { new IllegalArgumentException("wrong") ); status = RestStatus.BAD_REQUEST; - if (detailed) { - type = "illegal_argument_exception"; - reason = "wrong"; - } else { - reason = "RemoteTransportException[[ElasticsearchWrapperException with a cause that has a special treatment]]"; - } + type = "illegal_argument_exception"; + reason = "wrong"; } case 5 -> { status = randomFrom(RestStatus.values()); original = new ElasticsearchStatusException("ElasticsearchStatusException with random status", status); if (detailed) { addHeadersOrMetadata = randomBoolean(); - type = "status_exception"; - reason = "ElasticsearchStatusException with random status"; - } else { - reason = "ElasticsearchStatusException[ElasticsearchStatusException with random status]"; } + type = "status_exception"; + reason = "ElasticsearchStatusException with random status"; } default -> throw new UnsupportedOperationException("Failed to generate random exception"); } @@ -435,7 +402,6 @@ public void testErrorToAndFromXContent() throws IOException { assertEquals(expected.status(), parsedError.status()); assertDeepEquals(expected, parsedError); - assertChannelWarnings(channel); } public void testNoErrorFromXContent() throws IOException { @@ -502,7 +468,9 @@ public void testResponseContentTypeUponException() throws Exception { Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); RestResponse response = new RestResponse(channel, t); assertThat(response.contentType(), equalTo(mediaType)); - assertChannelWarnings(channel); + assertWarnings( + "The JSON format of non-detailed errors has changed in Elasticsearch 9.0 to match the JSON structure used for detailed errors." + ); } public void testSupressedLogging() throws IOException { @@ -534,7 +502,6 @@ public void testSupressedLogging() throws IOException { "401", "unauthorized" ); - assertChannelWarnings(channel); } private void assertLogging( @@ -560,15 +527,6 @@ private void assertLogging( } } - private void assertChannelWarnings(RestChannel channel) { - if (channel.detailedErrorsEnabled() == false) { - assertWarnings( - "The JSON format of non-detailed errors will change in Elasticsearch 9.0" - + " to match the JSON structure used for detailed errors. To keep using the existing format, use the V8 REST API." 
- ); - } - } - public static class WithHeadersException extends ElasticsearchException { WithHeadersException() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java index 03ae366050646..827a07b89b2b8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/RestBuilderListenerTests.java @@ -26,7 +26,7 @@ public class RestBuilderListenerTests extends ESTestCase { public void testXContentBuilderClosedInBuildResponse() throws Exception { AtomicReference<XContentBuilder> builderAtomicReference = new AtomicReference<>(); RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { @@ -44,7 +44,7 @@ public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws E public void testXContentBuilderNotClosedInBuildResponseAssertionsDisabled() throws Exception { AtomicReference<XContentBuilder> builderAtomicReference = new AtomicReference<>(); RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { @@ -68,7 +68,7 @@ public void testXContentBuilderNotClosedInBuildResponseAssertionsEnabled() throw assumeTrue("tests are not being run with assertions", RestBuilderListener.class.desiredAssertionStatus()); RestBuilderListener<Empty> builderListener = new RestBuilderListener<Empty>( - new FakeRestChannel(new FakeRestRequest(), true, 1) + new FakeRestChannel(new FakeRestRequest(), randomBoolean(), 1) ) { @Override public RestResponse buildResponse(Empty empty, XContentBuilder builder) throws Exception { diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java index 8104ecfc31c3d..dad6885a08fa8 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestTasksActionTests.java @@ -34,7 +34,7 @@ public void testConsumesParameters() throws Exception { FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams( Map.of("parent_task_id", "the node:3", "nodes", "node1,node2", "actions", "*") ).build(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), 1); try (var threadPool = createThreadPool()) { final var nodeClient = buildNodeClient(threadPool); action.handleRequest(fakeRestRequest, fakeRestChannel, nodeClient); diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java index 0d35e4311032d..f83ba1704f954 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java @@ -222,7 +222,7 @@ public void next() { }) .withHeaders(Map.of("Content-Type", 
Collections.singletonList("application/json"))) .build(); - FakeRestChannel channel = new FakeRestChannel(request, true, 1); + FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); RestBulkAction.ChunkHandler chunkHandler = new RestBulkAction.ChunkHandler( true, diff --git a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java index 4822b1c64cf41..d6953e79a0c3f 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/search/RestSearchActionTests.java @@ -51,7 +51,7 @@ public void testEnableFieldsEmulationNoErrors() throws Exception { .withParams(params) .build(); - action.handleRequest(request, new FakeRestChannel(request, true, 1), verifyingClient); + action.handleRequest(request, new FakeRestChannel(request, randomBoolean(), 1), verifyingClient); } public void testValidateSearchRequest() { diff --git a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java new file mode 100644 index 0000000000000..c4a1699181efc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java @@ -0,0 +1,342 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.script; + +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimDotProduct; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimInvHamming; +import org.elasticsearch.script.field.vectors.BitMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HexFormat; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MultiVectorScoreScriptUtilsTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testFloatMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][][] docVectors = new float[][][] { + { { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }, { 100.0f, 200.0f, -50.0f, 10.0f, -150.0f } } }; + float[][] docMagnitudes = new float[][] { { 0.0f, 0.0f } }; + for (int i = 0; i < docVectors.length; i++) { + for (int j = 0; j < docVectors[i].length; j++) { + docMagnitudes[i][j] = (float) Math.sqrt(VectorUtil.dotProduct(docVectors[i][j], docVectors[i][j])); + } + } + + List<List<Float>> queryVector = List.of(Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f)); + List<List<Double>> invalidQueryVector = List.of(Arrays.asList(0.5, 111.3)); + + List<MultiDenseVectorDocValuesField> fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ), + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + // Test max similarity dot product + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + float maxSimDotProductExpected = 65425.625f; // Adjust this value based on expected max similarity + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + maxSimDotProductExpected, + maxSimDotProduct.maxSimDotProduct(), + 0.001 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MultiVectorScoreScriptUtils.MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = 
expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat(e.getMessage(), containsString("hamming distance is only supported for byte or bit vectors")); + + // Check scripting infrastructure integration + assertEquals(65425.6249, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct() + ); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testByteMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][] docVector = new float[][] { { 1, 127, -128, 5, -10 } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List<List<Byte>> queryVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + List<List<Byte>> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List<String> hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 1, 125, -12, 2, 4 })); + + List<MultiDenseVectorDocValuesField> fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "test", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + + // Check scripting infrastructure integration + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName).maxSimDotProduct(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, queryVector, fieldName).maxSimInvHamming(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, hexidecimalString, fieldName).maxSimInvHamming(), 0.001); + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows(IllegalArgumentException.class, maxSimDotProduct::maxSimDotProduct); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testBitMultiVectorClassBindingsDotProduct() throws IOException { + String fieldName = "vector"; + int dims = 8; + float[][] docVector = new float[][] { { 124 } }; + // 124 in binary is b01111100 + List<List<Byte>> queryVector = List.of( + Arrays.asList((byte) 1, (byte) 125, (byte) -12, 
(byte) 2, (byte) 4, (byte) 1, (byte) 125, (byte) -12) + ); + List<List<Float>> floatQueryVector = List.of(Arrays.asList(1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f)); + List<List<Byte>> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List<String> hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 124 })); + + List<MultiDenseVectorDocValuesField> fields = List.of( + new BitMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BIT), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 5 } }), + "test", + ElementType.BIT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + MaxSimDotProduct function = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + -12 + 2 + 4 + 1 + 125, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, floatQueryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + 0.42f + 0f + 1f - 1f - 0.42f, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + Integer.bitCount(124), + function.maxSimDotProduct(), + 0.0 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString( + "query vector contains inner vectors which have incorrect number of dimensions. " + + "Must be [1] for bitwise operations, or [8] for byte wise operations: provided [2]." 
+ ) + ); + } + } + + public void testByteVsFloatSimilarity() throws IOException { + int dims = 5; + float[][] docVector = new float[][] { { 1f, 127f, -128f, 5f, -10f } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List<List<Float>> listFloatVector = List.of(Arrays.asList(1f, 125f, -12f, 2f, 4f)); + List<List<Byte>> listByteVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + float[][] floatVector = new float[][] { { 1f, 125f, -12f, 2f, 4f } }; + byte[][] byteVector = new byte[][] { { (byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4 } }; + + List<MultiDenseVectorDocValuesField> fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field1", + ElementType.FLOAT, + dims + ), + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field3", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + int dotProductExpected = 17382; + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, listFloatVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + maxSimDotProduct = new MaxSimDotProduct(scoreScript, listByteVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + switch (field.getElementType()) { + case BYTE -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(byteVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(floatVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(byte[][] queryVector)] instead")); + } + case FLOAT -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(floatVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(byteVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(float[][] queryVector)] instead")); + } + } + } + } + + public void testByteBoundaries() throws IOException { + String fieldName = "vector"; + int dims = 1; + float[] docVector = new float[] { 0 }; + List<List<Integer>> greaterThanVector = List.of(List.of(128)); + List<List<Integer>> lessThanVector = List.of(List.of(-129)); + List<List<Double>> decimalVector = List.of(List.of(0.5)); + + List<MultiDenseVectorDocValuesField> fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { { docVector } }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 1 } }), + "test", + ElementType.BYTE, + dims + ) + ); + + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + IllegalArgumentException e; + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, greaterThanVector, 
fieldName)); + assertEquals( + "element_type [byte] vectors only support integers between [-128, 127] but found [128.0] at dim [0]; " + + "Preview of invalid vector: [128.0]", + e.getMessage() + ); + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, lessThanVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support integers between [-128, 127] but found [-129.0] at dim [0]; " + + "Preview of invalid vector: [-129.0]" + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, decimalVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support non-decimal values but found decimal value [0.5] at dim [0]; " + + "Preview of invalid vector: [0.5]" + ); + } + } + + public void testDimMismatch() throws IOException { + + } +} diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java new file mode 100644 index 0000000000000..12f4b931b4d0a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.function.IntFunction; + +public class MultiDenseVectorTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testByteUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + byte[][] docVector = new byte[count][dims]; + float[][] queryVector = new float[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(docVector[i]); + for (int j = 0; j < dims; j++) { + queryVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newByteVector(docVector); + UnsupportedOperationException e; + + e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); + assertEquals(e.getMessage(), "use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + public void testFloatUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + float[][] docVector = new float[count][dims]; + byte[][] queryVector = new byte[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(queryVector[i]); + for (int j = 0; j < dims; j++) { + docVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newFloatVector(docVector); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); 
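+ // hypothetical clarifying comment (not in the original patch): the float implementation rejects byte[][] queries and points callers at the float[][] overload, mirroring testByteUnsupported above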
+ assertEquals(e.getMessage(), "use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + static MultiDenseVector newFloatVector(float[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new FloatMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static MultiDenseVector newByteVector(byte[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new ByteMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static BytesRef magnitudes(int count, IntFunction<Float> magnitude) { + ByteBuffer magnitudeBuffer = ByteBuffer.allocate(count * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < count; i++) { + magnitudeBuffer.putFloat(magnitude.apply(i)); + } + return new BytesRef(magnitudeBuffer.array()); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java index 125b2d20cf9f3..6e9bb596e944b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AdaptingAggregatorTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; @@ -113,7 +114,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[] { null }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java index 8d3fe0f7f6e79..2d0622dbb6322 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorBaseTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -47,7 +48,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { throw new UnsupportedOperationException(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 9b6ea7272d0f9..e796cee92c0dc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -28,6 +28,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.BucketCollector; @@ -77,7 +79,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -91,7 +93,7 @@ public ScoreMode scoreMode() { collector.preCollection(); indexSearcher.search(new MatchAllDocsQuery(), collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { @@ -141,7 +143,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); equalTo(Map.of(0L, List.of(0, 1, 2, 3, 4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9))); }); @@ -158,7 +160,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(4, 5, 6, 7), 1L, List.of(8), 2L, List.of(9)))); }); @@ -176,12 +178,20 @@ public void collect(int doc, long owningBucketOrd) throws IOException { } } }, (deferringCollector, finalCollector) -> { - deferringCollector.prepareSelectedBuckets(0, 8, 9); + deferringCollector.prepareSelectedBuckets(toLongArray(0, 8, 9)); assertThat(finalCollector.collection, equalTo(Map.of(0L, List.of(0, 1, 2, 3), 1L, List.of(8), 2L, List.of(9)))); }); } + private LongArray toLongArray(long... 
lons) { + LongArray longArray = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(lons.length); + for (int i = 0; i < lons.length; i++) { + longArray.set(i, lons[i]); + } + return longArray; + } + private void testCase( BiFunction leafCollector, CheckedBiConsumer verify diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java index 80f27b31ca65b..fb4c62ad66f19 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -72,7 +73,7 @@ protected LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCt } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) { return new InternalAggregation[0]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 2df6a0cfb91ca..a0a24e98ae721 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -68,7 +69,7 @@ public void testReplay() throws Exception { collector.preCollection(); indexSearcher.search(termQuery, collector.asCollector()); collector.postCollection(); - collector.prepareSelectedBuckets(0); + collector.prepareSelectedBuckets(BigArrays.NON_RECYCLING_INSTANCE.newLongArray(1, true)); assertEquals(topDocs.scoreDocs.length, deferredCollectedDocIds.size()); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java index f75f9f474c8e8..2f51a5a09a8ac 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorTests.java @@ -11,22 +11,29 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import 
org.apache.lucene.search.TermQuery; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.aggregations.metrics.TopHits; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.DoubleStream; @@ -37,6 +44,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notANumber; @@ -76,6 +85,35 @@ public void testAggregationSampling() throws IOException { assertThat(avgAvg, closeTo(1.5, 0.5)); } + public void testAggregationSampling_withScores() throws IOException { + long[] counts = new long[5]; + AtomicInteger integer = new AtomicInteger(); + do { + testCase(RandomSamplerAggregatorTests::writeTestDocs, (InternalRandomSampler result) -> { + counts[integer.get()] = result.getDocCount(); + if (result.getDocCount() > 0) { + TopHits agg = result.getAggregations().get("top"); + List<SearchHit> hits = Arrays.asList(agg.getHits().getHits()); + assertThat(Strings.toString(result), hits, hasSize(1)); + assertThat(Strings.toString(result), hits.get(0).getScore(), allOf(greaterThan(0.0f), lessThan(1.0f))); + } + }, + new AggTestConfig( + new RandomSamplerAggregationBuilder("my_agg").subAggregation(AggregationBuilders.topHits("top").size(1)) + .setProbability(0.25), + longField(NUMERIC_FIELD_NAME) + ).withQuery( + new BooleanQuery.Builder().add( + new TermQuery(new Term(KEYWORD_FIELD_NAME, KEYWORD_FIELD_VALUE)), + BooleanClause.Occur.SHOULD + ).build() + ) + ); + } while (integer.incrementAndGet() < 5); + long avgCount = LongStream.of(counts).sum() / integer.get(); + assertThat(avgCount, allOf(greaterThanOrEqualTo(20L), lessThanOrEqualTo(70L))); + } + public void testAggregationSamplingNestedAggsScaled() throws IOException { // in case 0 docs get sampled, which can rarely happen // in case the test index has many segments. 
diff --git a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java index 33978b4cd6b9f..d91b4430e4f94 100644 --- a/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java +++ b/server/src/test/java/org/elasticsearch/search/scroll/RestClearScrollActionTests.java @@ -54,7 +54,7 @@ public void clearScroll(ClearScrollRequest request, ActionListener {})); - // these parameters were previously silently ignored, they will still be ignored in existing indices - String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; - for (String param : unsupportedParameters) { - String mappingAsString = "{\n" - + " \"_doc\" : {\n" - + " \"" - + fieldName() - + "\" : {\n" - + " \"" - + param - + "\" : \"any\"\n" - + " }\n" - + " }\n" - + "}"; - assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); - } - } - - public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { - assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_6_0, IndexVersion.current()); - assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); - MapperService mapperService = createMapperService(version, mapping(b -> {})); - // these parameters were previously silently ignored, they are now deprecated in new indices - String[] unsupportedParameters = new String[] { "fields", "copy_to", "boost", "type" }; - for (String param : unsupportedParameters) { - String mappingAsString = "{\n" - + " \"_doc\" : {\n" - + " \"" - + fieldName() - + "\" : {\n" - + " \"" - + param - + "\" : \"any\"\n" - + " }\n" - + " }\n" - + "}"; - assertNotNull(mapperService.parseMapping("_doc", MergeReason.MAPPING_UPDATE, new CompressedXContent(mappingAsString))); - assertWarnings("Parameter [" + param + "] has no effect on metadata field [" + fieldName() + "] and will be removed in future"); - } - } } diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 40fb4f91c77d0..38c7b1eb04772 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -31,6 +31,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.MockPluginsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; @@ -279,10 +280,11 @@ private MockNode( final Collection> classpathPlugins, final boolean forbidPrivateIndexSettings ) { - super(NodeConstruction.prepareConstruction(environment, new MockServiceProvider() { + super(NodeConstruction.prepareConstruction(environment, null, new MockServiceProvider() { + @Override - PluginsService newPluginService(Environment environment, Settings settings) { - return new MockPluginsService(settings, environment, classpathPlugins); + PluginsService newPluginService(Environment environment, PluginsLoader pluginsLoader) { + return new MockPluginsService(environment.settings(), environment, classpathPlugins); } }, 
forbidPrivateIndexSettings)); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index e4734f9cf021e..d51b2cfb450bc 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -20,7 +20,6 @@ import org.elasticsearch.plugins.spi.SPIClassIterator; import java.lang.reflect.Constructor; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -44,14 +43,18 @@ public class MockPluginsService extends PluginsService { * @param classpathPlugins Plugins that exist in the classpath which should be loaded */ public MockPluginsService(Settings settings, Environment environment, Collection<Class<? extends Plugin>> classpathPlugins) { - super(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + super(settings, environment.configFile(), new PluginsLoader(environment.modulesFile(), environment.pluginsFile()) { - final Path configPath = environment.configFile(); + @Override + protected void addServerExportsService(Map<String, List<ModuleQualifiedExportsService>> qualifiedExports) { + // tests don't run modular + } + }); List<LoadedPlugin> pluginsLoaded = new ArrayList<>(); for (Class<? extends Plugin> pluginClass : classpathPlugins) { - Plugin plugin = loadPlugin(pluginClass, settings, configPath); + Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile()); PluginDescriptor pluginInfo = new PluginDescriptor( pluginClass.getName(), "classpath plugin", @@ -69,7 +72,7 @@ public MockPluginsService(Settings settings, Environment environment, Collection if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } - pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin, pluginClass.getClassLoader(), ModuleLayer.boot())); + pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin)); } loadExtensions(pluginsLoaded); this.classpathPlugins = List.copyOf(pluginsLoaded); @@ -169,9 +172,4 @@ private static <T> List<? extends T> createExtensions( } return extensions; } - - @Override - protected void addServerExportsService(Map<String, List<ModuleQualifiedExportsService>> qualifiedExports) { - // tests don't run modular - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 207409dfcf751..d98b51adce615 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1704,7 +1704,23 @@ protected final BytesReference toShuffledXContent( boolean humanReadable, String... exceptFieldNames ) throws IOException { - BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, params, humanReadable); + return toShuffledXContent(toXContent, xContentType, RestApiVersion.current(), params, humanReadable, exceptFieldNames); + } + + /** + * Returns the bytes that represent the XContent output of the provided {@link ToXContent} object, using the provided + * {@link XContentType}. Wraps the output into a new anonymous object depending on the value returned + * by the {@link ToXContent#isFragment()} method. Shuffles the keys to make sure that parsing never relies on keys ordering. + */ + protected final BytesReference toShuffledXContent( + ToXContent toXContent, + XContentType xContentType, + RestApiVersion restApiVersion, + ToXContent.Params params, + boolean humanReadable, + String...
exceptFieldNames + ) throws IOException { + BytesReference bytes = XContentHelper.toXContent(toXContent, xContentType, restApiVersion, params, humanReadable); try (XContentParser parser = createParser(xContentType.xContent(), bytes)) { try (XContentBuilder builder = shuffleXContent(parser, rarely(), exceptFieldNames)) { return BytesReference.bytes(builder); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 28c9905386091..dd08107bd67fb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -69,6 +69,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; @@ -112,6 +113,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -415,8 +417,7 @@ protected final TestFeatureService createTestFeatureService( logger.warn( "This test is running on the legacy test framework; historical features from production code will not be available. " + "You need to port the test to the new test plugins in order to use historical features from production code. " - + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification such as {}.", - RestTestLegacyFeatures.class.getCanonicalName() + + "If this is a legacy feature used only in tests, you can add it to a test-only FeatureSpecification." ); } return new ESRestTestFeatureService(additionalTestOnlyHistoricalFeatures(), semanticNodeVersions, clusterStateFeatures.values()); @@ -719,10 +720,6 @@ protected boolean preserveTemplatesUponCompletion() { * all feature states, deleting system indices, system associated indices, and system data streams. */ protected boolean resetFeatureStates() { - // ML reset fails when ML is disabled in versions before 8.7 - if (isMlEnabled() == false && clusterHasFeature(RestTestLegacyFeatures.ML_STATE_RESET_FALLBACK_ON_DISABLED) == false) { - return false; - } return true; } @@ -917,50 +914,46 @@ private void wipeCluster() throws Exception { * slows down the test because xpack will just recreate * them. 
*/ - // In case of bwc testing, we need to delete component and composable - // index templates only for clusters that support this historical feature - if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) { - try { - Request getTemplatesRequest = new Request("GET", "_index_template"); - Map composableIndexTemplates = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), - false - ); - List names = ((List) composableIndexTemplates.get("index_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toList()); - if (names.isEmpty() == false) { - try { - adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names))); - } catch (ResponseException e) { - logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e); - } + try { + Request getTemplatesRequest = new Request("GET", "_index_template"); + Map composableIndexTemplates = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), + false + ); + List names = ((List) composableIndexTemplates.get("index_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .collect(Collectors.toList()); + if (names.isEmpty() == false) { + try { + adminClient().performRequest(new Request("DELETE", "_index_template/" + String.join(",", names))); + } catch (ResponseException e) { + logger.warn(() -> format("unable to remove multiple composable index templates %s", names), e); } - } catch (Exception e) { - logger.debug("ignoring exception removing all composable index templates", e); - // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } - try { - Request compReq = new Request("GET", "_component_template"); - String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); - Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); - List names = ((List) cTemplates.get("component_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toList()); - if (names.isEmpty() == false) { - try { - adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names))); - } catch (ResponseException e) { - logger.warn(() -> format("unable to remove multiple component templates %s", names), e); - } + } catch (Exception e) { + logger.debug("ignoring exception removing all composable index templates", e); + // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore + } + try { + Request compReq = new Request("GET", "_component_template"); + String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); + Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); + List names = ((List) cTemplates.get("component_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .collect(Collectors.toList()); + if (names.isEmpty() == false) { + try { + adminClient().performRequest(new Request("DELETE", "_component_template/" + String.join(",", names))); + } catch 
(ResponseException e) { + logger.warn(() -> format("unable to remove multiple component templates %s", names), e); } - } catch (Exception e) { - logger.debug("ignoring exception removing all component templates", e); - // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } + } catch (Exception e) { + logger.debug("ignoring exception removing all component templates", e); + // We hit a version of ES that doesn't support index templates v2 yet, so it's safe to ignore } if (has(ProductFeature.LEGACY_TEMPLATES)) { @@ -1058,29 +1051,25 @@ private Set getAllUnexpectedTemplates() throws IOException { Set unexpectedTemplates = new HashSet<>(); if (preserveDataStreamsUponCompletion() == false && preserveTemplatesUponCompletion() == false) { if (has(ProductFeature.XPACK)) { - // In case of bwc testing, we need to delete component and composable - // index templates only for clusters that support this historical feature - if (clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED)) { - Request getTemplatesRequest = new Request("GET", "_index_template"); - Map composableIndexTemplates = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), - false - ); - unexpectedTemplates.addAll( - ((List) composableIndexTemplates.get("index_templates")).stream() - .map(ct -> (String) ((Map) ct).get("name")) - .filter(name -> isXPackTemplate(name) == false) - .collect(Collectors.toSet()) - ); - Request compReq = new Request("GET", "_component_template"); - String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); - Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); - ((List) cTemplates.get("component_templates")).stream() + Request getTemplatesRequest = new Request("GET", "_index_template"); + Map composableIndexTemplates = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(adminClient().performRequest(getTemplatesRequest).getEntity()), + false + ); + unexpectedTemplates.addAll( + ((List) composableIndexTemplates.get("index_templates")).stream() .map(ct -> (String) ((Map) ct).get("name")) .filter(name -> isXPackTemplate(name) == false) - .forEach(unexpectedTemplates::add); - } + .collect(Collectors.toSet()) + ); + Request compReq = new Request("GET", "_component_template"); + String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); + Map cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); + ((List) cTemplates.get("component_templates")).stream() + .map(ct -> (String) ((Map) ct).get("name")) + .filter(name -> isXPackTemplate(name) == false) + .forEach(unexpectedTemplates::add); if (has(ProductFeature.LEGACY_TEMPLATES)) { Request getLegacyTemplatesRequest = new Request("GET", "_template"); @@ -1840,8 +1829,9 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se if (settings != null && settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); + } else if (isSyntheticSourceConfiguredInMapping(mapping)) { + request.setOptions(expectVersionSpecificWarnings(v -> v.compatible(SourceFieldMapper.DEPRECATION_WARNING))); } - final Response response = client.performRequest(request); try (var parser = responseAsParser(response)) { return 
TestResponseParsers.parseCreateIndexResponse(parser); @@ -1885,6 +1875,27 @@ protected static void expectSoftDeletesWarning(Request request, String indexName })); } + @SuppressWarnings("unchecked") + protected static boolean isSyntheticSourceConfiguredInMapping(String mapping) { + if (mapping == null) { + return false; + } + var mappings = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + mapping.trim().startsWith("{") ? mapping : '{' + mapping + '}', + false + ); + if (mappings.containsKey("_doc")) { + mappings = (Map<String, Object>) mappings.get("_doc"); + } + Map<String, Object> sourceMapper = (Map<String, Object>) mappings.get(SourceFieldMapper.NAME); + if (sourceMapper == null) { + return false; + } + Object mode = sourceMapper.get("mode"); + return mode != null && mode.toString().toLowerCase(Locale.ROOT).equals("synthetic"); + } + protected static Map<String, Object> getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); @@ -2282,7 +2293,7 @@ protected static Map<String, Set<String>> getClusterStateFeatures(RestClient adm */ protected static IndexVersion minimumIndexVersion() throws IOException { final Request request = new Request("GET", "_nodes"); - request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version"); + request.addParameter("filter_path", "nodes.*.version,nodes.*.max_index_version,nodes.*.index_version"); final Response response = adminClient().performRequest(request); final Map<String, Object> nodes = ObjectPath.createFromResponse(response).evaluate("nodes"); @@ -2290,10 +2301,13 @@ protected static IndexVersion minimumIndexVersion() throws IOException { IndexVersion minVersion = null; for (Map.Entry<String, Object> node : nodes.entrySet()) { Map<?, ?> nodeData = (Map<?, ?>) node.getValue(); - String versionStr = (String) nodeData.get("max_index_version"); + Object versionStr = nodeData.get("index_version"); + if (versionStr == null) { + versionStr = nodeData.get("max_index_version"); + } // fallback on version if index version is not there IndexVersion indexVersion = versionStr != null - ? IndexVersion.fromId(Integer.parseInt(versionStr)) + ? IndexVersion.fromId(Integer.parseInt(versionStr.toString())) : IndexVersion.fromId( parseLegacyVersion((String) nodeData.get("version")).map(Version::id).orElse(IndexVersions.MINIMUM_COMPATIBLE.id()) ); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index 66c24f157ddfe..cd3406e7ddac5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -55,7 +55,6 @@ class ESRestTestFeatureService implements TestFeatureService { ESRestTestFeatureService(List<FeatureSpecification> featureSpecs, Set<Version> nodeVersions, Collection<Set<String>> nodeFeatures) { List<FeatureSpecification> specs = new ArrayList<>(featureSpecs); - specs.add(new RestTestLegacyFeatures()); if (MetadataHolder.HISTORICAL_FEATURES != null) { specs.add(MetadataHolder.HISTORICAL_FEATURES); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java deleted file mode 100644 index a10394b4156d6..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V.
under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.test.rest; - -import org.elasticsearch.Version; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.features.FeatureSpecification; -import org.elasticsearch.features.NodeFeature; - -import java.util.Map; - -import static java.util.Map.entry; - -/** - * This class groups historical features that have been removed from the production codebase, but are still used by the test - * framework to support BwC tests. Rather than leaving them in the main src we group them here, so it's clear they are not used in - * production code anymore. - */ -public class RestTestLegacyFeatures implements FeatureSpecification { - public static final NodeFeature ML_STATE_RESET_FALLBACK_ON_DISABLED = new NodeFeature("ml.state_reset_fallback_on_disabled"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature COMPONENT_TEMPLATE_SUPPORTED = new NodeFeature("indices.component_template_supported"); - public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); - - // Ref: https://github.com/elastic/elasticsearch/pull/86416 - public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); - - // QA - rolling upgrade tests - public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported"); - public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); - public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); - - public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format"); - public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported"); - - public static final NodeFeature TSDB_DOWNSAMPLING_STABLE = new NodeFeature("indices.tsdb_downsampling_stable"); - - /* - * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template. - * Fixed in #98840 - */ - public static final NodeFeature TSDB_EMPTY_TEMPLATE_FIXED = new NodeFeature("indices.tsdb_empty_composable_template_fixed"); - public static final NodeFeature SYNTHETIC_SOURCE_SUPPORTED = new NodeFeature("indices.synthetic_source"); - - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_SUPPORTED = new NodeFeature("allocator.desired_balance"); - - /* - * Cancel shard allocation command is broken for initial desired balance versions - * and might allocate shard on the node where it is not supposed to be. This - * is fixed by https://github.com/elastic/elasticsearch/pull/93635. 
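For reference, here is a minimal test-only FeatureSpecification of the shape this deleted class had, which is what the ESRestTestCase warning earlier in this patch points test authors at. The feature id and version below are illustrative, not taken from the PR:

import org.elasticsearch.Version;
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

import java.util.Map;

// Illustrative only: mirrors the structure of the class being deleted here.
public class MyPluginLegacyTestFeatures implements FeatureSpecification {
    public static final NodeFeature MY_TEST_ONLY_FEATURE = new NodeFeature("my_plugin.test_only_feature");

    @Override
    public Map<NodeFeature, Version> getHistoricalFeatures() {
        return Map.of(MY_TEST_ONLY_FEATURE, Version.V_8_0_0);
    }
}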
- */ - public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_FIXED = new NodeFeature("allocator.desired_balance_fixed"); - public static final NodeFeature INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED = new NodeFeature("settings.indexing_slowlog_level_removed"); - public static final NodeFeature DEPRECATION_WARNINGS_LEAK_FIXED = new NodeFeature("deprecation_warnings_leak_fixed"); - - // QA - Full cluster restart - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature REPLICATION_OF_CLOSED_INDICES = new NodeFeature("indices.closed_replication_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SOFT_DELETES_ENFORCED = new NodeFeature("indices.soft_deletes_enforced"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_TRANSPORT_COMPRESSED_SETTING = new NodeFeature("transport.new_compressed_setting"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SERVICE_ACCOUNTS_SUPPORTED = new NodeFeature("auth.service_accounts_supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature TRANSFORM_SUPPORTED = new NodeFeature("transform.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature SLM_SUPPORTED = new NodeFeature("slm.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DATA_STREAMS_SUPPORTED = new NodeFeature("data_stream.supported"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature NEW_DATA_STREAMS_INDEX_NAME_FORMAT = new NodeFeature("data_stream.new_index_name_format"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature DISABLE_FIELD_NAMES_FIELD_REMOVED = new NodeFeature("disable_of_field_names_field_removed"); - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final NodeFeature ML_NLP_SUPPORTED = new NodeFeature("ml.nlp_supported"); - - // YAML - public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - entry(COMPONENT_TEMPLATE_SUPPORTED, Version.V_7_8_0), - entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), - entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), - entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), - entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), - entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), - entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), - entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), - entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0), - entry(SYNTHETIC_SOURCE_SUPPORTED, Version.V_8_4_0), - entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0), - entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1), - entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0), - entry(TSDB_DOWNSAMPLING_STABLE, Version.V_8_10_0), - entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0), - entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0), - entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9), - entry(REPLICATION_OF_CLOSED_INDICES, Version.V_7_2_0), - entry(SOFT_DELETES_ENFORCED, Version.V_8_0_0), - entry(NEW_TRANSPORT_COMPRESSED_SETTING, Version.V_7_14_0), - entry(SERVICE_ACCOUNTS_SUPPORTED, Version.V_7_13_0), - entry(TRANSFORM_SUPPORTED, Version.V_7_2_0), - entry(SLM_SUPPORTED, Version.V_7_4_0), - entry(DATA_STREAMS_SUPPORTED, 
Version.V_7_9_0), - entry(NEW_DATA_STREAMS_INDEX_NAME_FORMAT, Version.V_7_11_0), - entry(DISABLE_FIELD_NAMES_FIELD_REMOVED, Version.V_8_0_0), - entry(ML_NLP_SUPPORTED, Version.V_8_0_0) - ); - } -} diff --git a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java index e230982073699..d810f17ae552e 100644 --- a/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java +++ b/test/metadata-extractor/src/test/java/org/elasticsearch/extractor/features/HistoricalFeaturesMetadataExtractorTests.java @@ -27,7 +27,6 @@ import java.util.Set; import static org.elasticsearch.xcontent.XContentParserConfiguration.EMPTY; -import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; @@ -48,7 +47,7 @@ public void testExtractHistoricalMetadata() throws IOException { nodeFeatureVersionMap.putAll(historical); featureNamesSet.addAll(names); }); - assertThat(nodeFeatureVersionMap, not(anEmptyMap())); + // assertThat(nodeFeatureVersionMap, not(anEmptyMap())); assertThat(featureNamesSet, not(empty())); assertThat(featureNamesSet, hasItem("test_features_enabled")); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 22449ca763d09..11787866af0d7 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -18,8 +18,7 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - ML_SCALE_FROM_ZERO("es.ml_scale_from_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null); + SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; public final Version from; diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 86c3f42a6a8ec..627554f6b261d 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -20,8 +20,8 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.core.Tuple; import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse; import org.elasticsearch.test.rest.yaml.ClientYamlTestResponseException; @@ -371,13 +371,7 @@ public void execute(ClientYamlTestExecutionContext 
executionContext) throws IOEx ? executionContext.getClientYamlTestCandidate().getTestPath() : null; - var fixedProductionHeader = executionContext.clusterHasFeature( - RestTestLegacyFeatures.REST_ELASTIC_PRODUCT_HEADER_PRESENT.id(), - false - ); - if (fixedProductionHeader) { - checkElasticProductHeader(response.getHeaders("X-elastic-product")); - } + checkElasticProductHeader(response.getHeaders("X-elastic-product")); checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { checkResponseException(e, executionContext); @@ -502,6 +496,8 @@ public void checkWarningHeaders(final List warningHeaders, String testPa } } + unexpected.removeIf(s -> s.endsWith(SourceFieldMapper.DEPRECATION_WARNING + "\"")); + if (unexpected.isEmpty() == false || unmatched.isEmpty() == false || missing.isEmpty() == false diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 85882a5c56851..1691aedf543f4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -20,6 +20,8 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.util.ObjectArrayPriorityQueue; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; @@ -235,57 +237,62 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalMultiTerms.Bucket[][] topBucketsPerOrd = new InternalMultiTerms.Bucket[owningBucketOrds.length][]; - long[] otherDocCounts = new long[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); - - int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); - try ( - ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( - size, - bigArrays(), - partiallyBuiltBucketComparator - ) - ) { - InternalMultiTerms.Bucket spare = null; - BytesRef spareKey = null; - BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); - while (ordsEnum.next()) { - long docCount = bucketDocCount(ordsEnum.ord()); - otherDocCounts[ordIdx] += docCount; - if (docCount < bucketCountThresholds.getShardMinDocCount()) { - continue; - } - if (spare == null) { - spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); - spareKey = new BytesRef(); + public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + try ( + LongArray otherDocCounts = bigArrays().newLongArray(owningBucketOrds.size(), true); + ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(owningBucketOrds.size()) + ) { + for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) { + final long owningBucketOrd = owningBucketOrds.get(ordIdx); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrd); + + int size = (int) Math.min(bucketsInOrd, 
bucketCountThresholds.getShardSize()); + try ( + ObjectArrayPriorityQueue ordered = new BucketPriorityQueue<>( + size, + bigArrays(), + partiallyBuiltBucketComparator + ) + ) { + InternalMultiTerms.Bucket spare = null; + BytesRef spareKey = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts.increment(ordIdx, docCount); + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + checkRealMemoryCBForInternalBucket(); + spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); + spareKey = new BytesRef(); + } + ordsEnum.readValue(spareKey); + spare.terms = unpackTerms(spareKey); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); } - ordsEnum.readValue(spareKey); - spare.terms = unpackTerms(spareKey); - spare.docCount = docCount; - spare.bucketOrd = ordsEnum.ord(); - spare = ordered.insertWithOverflow(spare); - } - // Get the top buckets - InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; - topBucketsPerOrd[ordIdx] = bucketsForOrd; - for (int b = (int) ordered.size() - 1; b >= 0; --b) { - topBucketsPerOrd[ordIdx][b] = ordered.pop(); - otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + // Get the top buckets + InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[(int) ordered.size()]; + topBucketsPerOrd.set(ordIdx, bucketsForOrd); + for (int b = (int) ordered.size() - 1; b >= 0; --b) { + InternalMultiTerms.Bucket[] buckets = topBucketsPerOrd.get(ordIdx); + buckets[b] = ordered.pop(); + otherDocCounts.increment(ordIdx, -buckets[b].getDocCount()); + } } } - } - buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { - result[ordIdx] = buildResult(otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } - return result; } InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] topBuckets) { @@ -305,7 +312,7 @@ InternalMultiTerms buildResult(long otherDocCount, InternalMultiTerms.Bucket[] t bucketCountThresholds.getShardSize(), showTermDocCountError, otherDocCount, - List.of(topBuckets), + Arrays.asList(topBuckets), 0, formats, keyConverters, diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml index ac6462c86676c..a5a3a7433f4c1 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@mappings.yaml @@ -4,6 +4,7 @@ _meta: managed: true template: mappings: + date_detection: false dynamic: true dynamic_templates: - numeric_labels: diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 7190686aa073c..26040529b04df 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -87,7 +87,6 @@ 
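The MultiTermsAggregator rewrite above moves per-ordinal scratch state off plain long[]/Bucket[][] arrays onto BigArrays-backed LongArray/ObjectArray buffers held in try-with-resources, so the allocations are circuit-breaker accounted and released deterministically. A minimal sketch of that pattern, using only calls visible in the hunk; the BigArrays instance is assumed to come from the aggregator's context:

import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.ObjectArray;

class ScratchBuffersSketch {
    // Accumulate one counter and one object slot per owning ordinal; both arrays
    // are sized up front, tracked by the breaker behind BigArrays, and released
    // automatically when the try block exits.
    static void demo(BigArrays bigArrays, long ordCount) {
        try (
            LongArray docCounts = bigArrays.newLongArray(ordCount, true);     // zero-filled
            ObjectArray<String[]> perOrd = bigArrays.newObjectArray(ordCount) // null-filled
        ) {
            for (long ord = 0; ord < ordCount; ord++) {
                docCounts.increment(ord, 1);
                perOrd.set(ord, new String[] { "bucket-" + ord });
            }
            // read results back before the arrays are closed, e.g. docCounts.get(0)
        }
    }
}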
tasks.named("precommit").configure { } tasks.named("yamlRestCompatTestTransform").configure({ task -> - task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. That's safe.") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry) snapshot version", "The number of functions is constantly increasing") @@ -95,5 +94,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/80_text/reverse text", "The output type changed from TEXT to KEYWORD.") task.skipTest("esql/80_text/values function", "The output type changed from TEXT to KEYWORD.") task.skipTest("privileges/11_builtin/Test get builtin privileges" ,"unnecessary to test compatibility") + task.skipTest("esql/61_enrich_ip/Invalid IP strings", "We switched from exceptions to null+warnings for ENRICH runtime errors") }) diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index 11661342c14a2..86f974ed13359 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -6,6 +6,11 @@ */ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -17,6 +22,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register("leader-cluster") { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -30,7 +37,19 @@ def followCluster = testClusters.register("follow-cluster") { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -47,7 +66,7 @@ tasks.register("writeJavaPolicy") { policyFile.write( [ "grant {", - " permission java.io.FilePermission \"${-> testClusters."follow-cluster".getFirstNode().getServerLog()}\", \"read\";", + " permission java.io.FilePermission \"${-> followCluster.map { it.getFirstNode().getServerLog() }.get()}\", \"read\";", "};" ].join("\n") ) @@ -56,11 +75,28 @@ tasks.register("writeJavaPolicy") { 
tasks.register("follow-cluster", RestIntegTestTask) { dependsOn 'writeJavaPolicy', "leader-cluster" - useCluster leaderCluster - systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) + useCluster leaderCluster + systemProperty 'tests.target_cluster', 'follow' + nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllHttpSocketURI().get(0) } + def followerUri = followInfo.map { it.getAllHttpSocketURI().get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index d1d59446a2264..61678784e6b38 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -7,6 +7,11 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -18,6 +23,7 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -27,12 +33,23 @@ def leaderCluster = testClusters.register('leader-cluster') { } def middleCluster = testClusters.register('middle-cluster') { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + 
it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -46,30 +63,74 @@ tasks.register("middle-cluster", RestIntegTestTask) { useCluster testClusters.named("leader-cluster") systemProperty 'tests.target_cluster', 'middle' systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host',leaderCluster.map(c -> c.allHttpSocketURI.get(0)) -} + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', leaderUri +} tasks.register('follow-cluster', RestIntegTestTask) { dependsOn "leader-cluster", "middle-cluster" - useCluster leaderCluster - useCluster middleCluster - systemProperty 'tests.target_cluster', 'follow' - systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'tests.middle_host', middleCluster.map(c -> c.allHttpSocketURI.get(0)) + useCluster leaderCluster + useCluster middleCluster + systemProperty 'tests.target_cluster', 'follow' + systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def middleUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.middle_host', middleUri } -testClusters.matching {it.name == "follow-cluster" }.configureEach { +testClusters.matching { it.name == "follow-cluster" }.configureEach { testDistribution = 'DEFAULT' setting 'xpack.monitoring.collection.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + + def middleUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) 
+ it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE setting 'cluster.remote.middle_cluster.seeds', - { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${middleUris.get().join(",")}\"" }, IGNORE_VALUE } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 53e068ae6126e..0bb4afe51b85a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.RestStatus; @@ -366,8 +367,10 @@ public void testSyntheticSource() throws Exception { final String leaderIndexName = "synthetic_leader"; if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); - createIndex(adminClient(), leaderIndexName, Settings.EMPTY, """ - "_source": {"mode": "synthetic"}, + Settings settings = Settings.builder() + .put(SourceFieldMapper.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), SourceFieldMapper.Mode.SYNTHETIC) + .build(); + createIndex(adminClient(), leaderIndexName, settings, """ "properties": {"kwd": {"type": "keyword"}}}""", null); for (int i = 0; i < numDocs; i++) { logger.info("Indexing doc [{}]", i); @@ -392,7 +395,6 @@ public void testSyntheticSource() throws Exception { } assertBusy(() -> { verifyDocuments(client(), followIndexName, numDocs); - assertMap(getIndexMappingAsMap(followIndexName), matchesMap().extraOk().entry("_source", Map.of("mode", "synthetic"))); if (overrideNumberOfReplicas) { assertMap(getIndexSettingsAsMap(followIndexName), matchesMap().extraOk().entry("index.number_of_replicas", "0")); } else { diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle index 7661ea08b057d..ff342accef277 100644 --- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle @@ -1,5 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -10,6 +14,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa:') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'true' @@ -21,8 +27,20 @@ def followerCluster = testClusters.register('follow-cluster') { setting 
'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register('leader-cluster', RestIntegTestTask) { @@ -34,7 +52,19 @@ tasks.register('follow-cluster', RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', followerCluster.map(c -> c.allHttpSocketURI.get(0)) + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def followUri = followInfo.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', followUri } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle index 47d37801e2dcf..848beb1da10ae 100644 --- a/x-pack/plugin/ccr/qa/restart/build.gradle +++ b/x-pack/plugin/ccr/qa/restart/build.gradle @@ -1,6 +1,10 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -9,6 +13,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -22,12 +28,23 @@ def followCluster = testClusters.register('follow-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllTransportPortURI().get(0) } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE + { 
"\"${leaderUri.get()}\"" }, IGNORE_VALUE nameCustomization = { 'follow' } } - tasks.register('leader-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.target_cluster', 'leader' @@ -37,8 +54,19 @@ tasks.register('follow-cluster', RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', - "${-> leaderCluster.get().getAllHttpSocketURI().get(0)}" + "${-> leaderUri.get() }" } tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { @@ -48,10 +76,27 @@ tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { systemProperty 'tests.rest.load_packaged', 'false' systemProperty 'tests.target_cluster', 'follow-restart' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def followUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.join(",") } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.rest.cluster', followUris + doFirst { - followCluster.get().restart() - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c-> c.getAllHttpSocketURI().get(0)) - nonInputProperties.systemProperty 'tests.rest.cluster', followCluster.map(c -> c.getAllHttpSocketURI().join(",")) + serviceProvider.get().restart(clusterPath, "follow-cluster") } } diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle index 5515aefeaa091..454a9ae721736 100644 --- a/x-pack/plugin/ccr/qa/security/build.gradle +++ b/x-pack/plugin/ccr/qa/security/build.gradle @@ -1,4 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -10,26 +15,38 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leadCluster = testClusters.register('leader-cluster') { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - extraConfigFile 'roles.yml', file('leader-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 
'xpack.security.enabled', 'true' + extraConfigFile 'roles.yml', file('leader-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } testClusters.register('follow-cluster') { - testDistribution = 'DEFAULT' - setting 'cluster.remote.leader_cluster.seeds', { - "\"${leadCluster.get().getAllTransportPortURI().join(",")}\"" - }, IGNORE_VALUE - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests - extraConfigFile 'roles.yml', file('follower-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.AllTransportPortURI } + + setting 'cluster.remote.leader_cluster.seeds', { + "\"${leaderUris.get().join(",")}\"" + }, IGNORE_VALUE + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests + extraConfigFile 'roles.yml', file('follower-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } tasks.register('leader-cluster', RestIntegTestTask) { @@ -41,7 +58,17 @@ def followerClusterTestTask = tasks.register('follow-cluster', RestIntegTestTask dependsOn 'leader-cluster' useCluster leadCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', leadCluster.map(c-> c.getAllHttpSocketURI().get(0)) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri } tasks.named("check").configure { dependsOn(followerClusterTestTask) } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index cd44aaafbfae2..05eb7551330b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -130,11 +130,11 @@ public boolean equals(Object obj) { * still result in unique snapshot names. 
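 * For example (illustrative values, not from this PR): with a date-math name like <snap-{now/d}>,
 * resolveDateMathExpression yields something like snap-2024.11.12, and the random suffix appended
 * below turns that into snap-2024.11.12-khbtxmpbqlexbgnzpvthvg, so two snapshots generated on the
 * same day still receive distinct names.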
*/ public static String generateSnapshotName(String name) { - return generateSnapshotName(name, new IndexNameExpressionResolver.ResolverContext()); + return generateSnapshotName(name, System.currentTimeMillis()); } - public static String generateSnapshotName(String name, IndexNameExpressionResolver.Context context) { - String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, context.getStartTime()); + public static String generateSnapshotName(String name, long now) { + String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, now); // TODO: we are breaking the rules of UUIDs by lowercasing this here, find an alternative (snapshot names must be lowercase) return candidate + "-" + UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java index f8cb371687d72..26f4f5c92073c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/IndexLifecycleMetadata.java @@ -58,10 +58,22 @@ public class IndexLifecycleMetadata implements Metadata.Custom { private final Map<String, LifecyclePolicyMetadata> policyMetadatas; private final OperationMode operationMode; + // a slightly different view of the policyMetadatas -- it's hot in a couple of places so we pre-calculate it + private final Map<String, LifecyclePolicy> policies; + + private static Map<String, LifecyclePolicy> policiesMap(final Map<String, LifecyclePolicyMetadata> policyMetadatas) { + final Map<String, LifecyclePolicy> policies = new HashMap<>(policyMetadatas.size()); + for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) { + LifecyclePolicy policy = policyMetadata.getPolicy(); + policies.put(policy.getName(), policy); + } + return Collections.unmodifiableMap(policies); + } public IndexLifecycleMetadata(Map<String, LifecyclePolicyMetadata> policies, OperationMode operationMode) { this.policyMetadatas = Collections.unmodifiableMap(policies); this.operationMode = operationMode; + this.policies = policiesMap(policyMetadatas); } public IndexLifecycleMetadata(StreamInput in) throws IOException { @@ -72,6 +84,7 @@ public IndexLifecycleMetadata(StreamInput in) throws IOException { } this.policyMetadatas = policies; this.operationMode = in.readEnum(OperationMode.class); + this.policies = policiesMap(policyMetadatas); } @Override @@ -93,13 +106,7 @@ public OperationMode getOperationMode() { } public Map<String, LifecyclePolicy> getPolicies() { - // note: this loop is unrolled rather than streaming-style because it's hot enough to show up in a flamegraph - Map<String, LifecyclePolicy> policies = new HashMap<>(policyMetadatas.size()); - for (LifecyclePolicyMetadata policyMetadata : policyMetadatas.values()) { - LifecyclePolicy policy = policyMetadata.getPolicy(); - policies.put(policy.getName(), policy); - } - return Collections.unmodifiableMap(policies); + return policies; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java index f4ac89124cddb..68e0f7e1ac885 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfig.java @@ -76,12 +76,6 @@ public TextExpansionConfig( this.vocabularyConfig = 
Optional.ofNullable(vocabularyConfig) .orElse(new VocabularyConfig(InferenceIndexConstants.nativeDefinitionStore())); this.tokenization = tokenization == null ? Tokenization.createDefault() : tokenization; - if (this.tokenization instanceof BertTokenization == false) { - throw ExceptionsHelper.badRequestException( - "text expansion models must be configured with BERT tokenizer, [{}] given", - this.tokenization.getName() - ); - } this.resultsField = resultsField; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java index 83f7832645270..ad8a55a5f8443 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/InferenceProcessorInfoExtractor.java @@ -51,7 +51,7 @@ public static int countInferenceProcessors(ClusterState state) { } Counter counter = Counter.newCounter(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map<String, Object> configMap = configuration.getConfigAsMap(); + Map<String, Object> configMap = configuration.getConfig(); List<Map<String, Object>> processorConfigs = (List<Map<String, Object>>) configMap.get(PROCESSORS_KEY); for (Map<String, Object> processorConfigWithKey : processorConfigs) { for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) { @@ -73,7 +73,7 @@ public static Set<String> getModelIdsFromInferenceProcessors(IngestMetadata inge Set<String> modelIds = new LinkedHashSet<>(); ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map<String, Object> configMap = configuration.getConfigAsMap(); + Map<String, Object> configMap = configuration.getConfig(); List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map<String, Object> processorConfigWithKey : processorConfigs) { for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) { @@ -100,7 +100,7 @@ public static Map<String, Set<String>> pipelineIdsByResource(ClusterState state, return pipelineIdsByModelIds; } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map<String, Object> configMap = configuration.getConfigAsMap(); + Map<String, Object> configMap = configuration.getConfig(); List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map<String, Object> processorConfigWithKey : processorConfigs) { for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) { @@ -131,7 +131,7 @@ public static Set<String> pipelineIdsForResource(ClusterState state, Set<String> return pipelineIds; } ingestMetadata.getPipelines().forEach((pipelineId, configuration) -> { - Map<String, Object> configMap = configuration.getConfigAsMap(); + Map<String, Object> configMap = configuration.getConfig(); List<Map<String, Object>> processorConfigs = readList(configMap, PROCESSORS_KEY); for (Map<String, Object> processorConfigWithKey : processorConfigs) { for (Map.Entry<String, Object> entry : processorConfigWithKey.entrySet()) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 259e66f633bac..cc589b53eaa1a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -484,6 +484,22 @@ static RoleDescriptor kibanaSystem(String name) { // Endpoint heartbeat. Kibana reads from these to determine metering/billing for // endpoints. 
RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.heartbeat-*").privileges("read", "create_index").build(), + // Security Solution workflows insights. Kibana creates, manages, and uses these + // to provide users with insights on potential configuration improvements + RoleDescriptor.IndicesPrivileges.builder() + .indices(".edr-workflow-insights-*") + .privileges( + "create_index", + "auto_configure", + "manage", + "read", + "write", + "delete", + TransportUpdateSettingsAction.TYPE.name(), + TransportPutMappingAction.TYPE.name(), + RolloverAction.NAME + ) + .build(), // For connectors telemetry. Will be removed once we switched to connectors API RoleDescriptor.IndicesPrivileges.builder().indices(".elastic-connectors*").privileges("read").build() }, null, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index 05f4e560b73c1..f160b704e9e12 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -580,14 +580,16 @@ private void addIndexLifecyclePoliciesIfMissing(ClusterState state) { logger.trace("running in data stream lifecycle only mode. skipping the installation of ILM policies."); return; } - IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE); + final IndexLifecycleMetadata metadata = state.metadata().custom(IndexLifecycleMetadata.TYPE); + final Map policies = metadata != null ? metadata.getPolicies() : Map.of(); + for (LifecyclePolicy policy : getLifecyclePolicies()) { final AtomicBoolean creationCheck = policyCreationsInProgress.computeIfAbsent( policy.getName(), key -> new AtomicBoolean(false) ); if (creationCheck.compareAndSet(false, true)) { - final LifecyclePolicy currentPolicy = metadata != null ? 
metadata.getPolicies().get(policy.getName()) : null; + final LifecyclePolicy currentPolicy = policies.get(policy.getName()); if (Objects.isNull(currentPolicy)) { logger.debug("adding lifecycle policy [{}] for [{}], because it doesn't exist", policy.getName(), getOrigin()); putPolicy(policy, creationCheck); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("<name-{now/d}>", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("<name-{now/d}>", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("<name-{now/d}>", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("<name-{now/d}>", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("<name-{now/M}>", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("<name-{now/M}>", time), startsWith("name-2019.03.01-")); - assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", resolverContext), startsWith("name-2019-03-15.21:09:00-")); + assertThat(generateSnapshotName("<name-{now/m{yyyy-MM-dd.HH:mm:ss}}>", time), startsWith("name-2019-03-15.21:09:00-")); } public void testNameValidation() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java index cf4630899ab53..a91cceec8a167 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextExpansionConfigTests.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.inference.InferenceConfigItemTestCase; @@ -67,13 +65,4 @@ protected TextExpansionConfig doParseInstance(XContentParser parser) throws IOEx protected TextExpansionConfig mutateInstanceForVersion(TextExpansionConfig instance, TransportVersion version) { return instance; } - - public void testBertTokenizationOnly() { - 
ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> new TextExpansionConfig(null, RobertaTokenizationTests.createRandom(), null) - ); - assertEquals(RestStatus.BAD_REQUEST, e.status()); - assertEquals("text expansion models must be configured with BERT tokenizer, [roberta] given", e.getMessage()); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java index 218876c7d40e8..3ca6777512420 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptorTests.java @@ -31,9 +31,12 @@ import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; +import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissionGroup; import org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; +import org.elasticsearch.xpack.core.security.authz.restriction.Workflow; +import org.elasticsearch.xpack.core.security.authz.restriction.WorkflowResolver; import org.hamcrest.Matchers; import java.io.IOException; @@ -47,7 +50,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.SECURITY_ROLE_DESCRIPTION; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptor.WORKFLOWS_RESTRICTION_VERSION; -import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivileges; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomIndicesPrivilegesBuilder; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTestHelper.randomRemoteClusterPermissions; import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; @@ -1338,38 +1340,191 @@ public void testIsEmpty() { } } - public void testHasPrivilegesOtherThanIndex() { + public void testHasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster() { + // any index and some cluster privileges are allowed assertThat( new RoleDescriptor( "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), // all of these are allowed + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(false) + ); + // any index and some cluster privileges are allowed + assertThat( + new RoleDescriptor( + "name", + new String[] { "manage_security" }, // unlikely we will ever support allowing manage security across clusters + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + null, + null, + null + 
).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // application privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + new ApplicationResourcePrivileges[] { + ApplicationResourcePrivileges.builder().application("app").privileges("foo").resources("res").build() }, + null, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // configurable cluster privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) }, + null, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // run as is not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + new String[] { "foo" }, + null, + null, + null, + null, + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // workflows restriction is not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, null, - randomBoolean() ? 
null : randomIndicesPrivileges(1, 5), null, null, null, null, null, null, + new RoleDescriptor.Restriction(WorkflowResolver.allWorkflows().stream().map(Workflow::name).toArray(String[]::new)), + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + // remote indices privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + new RoleDescriptor.RemoteIndicesPrivileges[] { + RoleDescriptor.RemoteIndicesPrivileges.builder("rmt").indices("idx").privileges("foo").build() }, null, null, null ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + // remote cluster privileges are not allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + null, + null, + null, + new RemoteClusterPermissions().addGroup( + new RemoteClusterPermissionGroup( + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new String[] { "rmt" } + ) + ), + null, + null + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), + is(true) + ); + + // metadata, transient metadata and description are allowed + assertThat( + new RoleDescriptor( + "name", + RemoteClusterPermissions.getSupportedRemoteClusterPermissions().toArray(new String[0]), + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("idx").privileges("foo").build() }, + null, + null, + null, + Collections.singletonMap("foo", "bar"), + Collections.singletonMap("foo", "bar"), + null, + null, + null, + "description" + ).hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), is(false) ); - final RoleDescriptor roleDescriptor = RoleDescriptorTestHelper.builder() - .allowReservedMetadata(true) - .allowRemoteIndices(true) - .allowRestriction(true) - .allowDescription(true) - .allowRemoteClusters(true) - .build(); - final boolean expected = roleDescriptor.hasClusterPrivileges() - || roleDescriptor.hasConfigurableClusterPrivileges() - || roleDescriptor.hasApplicationPrivileges() - || roleDescriptor.hasRunAs() - || roleDescriptor.hasRemoteIndicesPrivileges() - || roleDescriptor.hasWorkflowsRestriction(); - assertThat(roleDescriptor.hasUnsupportedPrivilegesInsideAPIKeyConnectedRemoteCluster(), equalTo(expected)); } private static void resetFieldPermssionsCache() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 9818a890d465f..17579fd6368ce 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -1106,6 +1106,28 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); }); + // index for Security Solution workflow insights + 
Arrays.asList(".edr-workflow-insights-" + randomAlphaOfLength(randomIntBetween(0, 13))).forEach((index) -> { + final IndexAbstraction indexAbstraction = mockIndexAbstraction(index); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportCreateIndexAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(indexAbstraction), is(false)); + assertThat( + kibanaRole.indices().allowedIndicesMatcher(TransportUpdateSettingsAction.TYPE.name()).test(indexAbstraction), + is(true) + ); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportPutMappingAction.TYPE.name()).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(RolloverAction.NAME).test(indexAbstraction), is(true)); + }); + // Data telemetry reads mappings, metadata and stats of indices Arrays.asList(randomAlphaOfLengthBetween(8, 24), "packetbeat-*").forEach((index) -> { logger.info("index name [{}]", index); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java index e396712cbc360..356fac4539137 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistryTests.java @@ -726,7 +726,7 @@ private static void assertPutPipelineAction( putRequest.getSource(), putRequest.getXContentType() ); - List processors = (List) pipelineConfiguration.getConfigAsMap().get("processors"); + List processors = (List) pipelineConfiguration.getConfig().get("processors"); assertThat(processors, hasSize(1)); Map setProcessor = (Map) ((Map) processors.get(0)).get("set"); assertNotNull(setProcessor.get("field")); diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java index 7cddd7e037742..512955a5fe2eb 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java @@ -56,7 +56,7 @@ static boolean exists(ClusterState clusterState) { if (ingestMetadata != null) 
{ final PipelineConfiguration pipeline = ingestMetadata.getPipelines().get(pipelineName()); if (pipeline != null) { - Object version = pipeline.getConfigAsMap().get("version"); + Object version = pipeline.getConfig().get("version"); return version instanceof Number number && number.intValue() >= ENRICH_PIPELINE_LAST_UPDATED_VERSION; } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java index 681b14e8be61c..1099603e9be07 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandlerTests.java @@ -61,7 +61,7 @@ public List routes() { }; FakeRestRequest fakeRestRequest = new FakeRestRequest(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, isLicensed ? 0 : 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), isLicensed ? 0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java index 45f42a754910d..53debedafc3d8 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Attribute.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -34,11 +33,6 @@ public abstract class Attribute extends NamedExpression { */ protected static final String SYNTHETIC_ATTRIBUTE_NAME_PREFIX = "$$"; - public static List getNamedWriteables() { - // TODO add UnsupportedAttribute when these are moved to the same project - return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); - } - // can the attr be null - typically used in JOINs private final Nullability nullability; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java index 0be7f65d767c7..00765a8c0528c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; @@ -15,7 +14,6 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.StringUtils; -import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -29,14 +27,6 @@ * (which is a 
type of expression) with a single child, c. */ public abstract class Expression extends Node<Expression> implements Resolvable { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : NamedExpression.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Literal.ENTRY); - return entries; - } public static class TypeResolution { private final boolean failed; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java new file mode 100644 index 0000000000000..174a0321a3057 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionCoreWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> expressions() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + // add entries as expressions + for (NamedWriteableRegistry.Entry e : namedExpressions()) { + entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (Expression) e.reader.read(in))); + } + entries.add(Literal.ENTRY); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> namedExpressions() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + // add entries as named writeables + for (NamedWriteableRegistry.Entry e : attributes()) { + entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); + } + entries.add(Alias.ENTRY); + return entries; + } + + public static List<NamedWriteableRegistry.Entry> attributes() { + return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java index 3b018f09e5ebd..f425fc7110a41 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/NamedExpression.java @@ -7,11 +7,9 @@ package org.elasticsearch.xpack.esql.core.expression; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.xpack.esql.core.tree.Source; -import java.util.ArrayList; import java.util.List; import java.util.Objects; @@ -20,14 +18,6 @@ * (by converting to an attribute). 
*/ public abstract class NamedExpression extends Expression implements NamedWriteable { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - for (NamedWriteableRegistry.Entry e : Attribute.getNamedWriteables()) { - entries.add(new NamedWriteableRegistry.Entry(NamedExpression.class, e.name, in -> (NamedExpression) e.reader.read(in))); - } - entries.add(Alias.ENTRY); - return entries; - } private final String name; private final NameId id; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java index cad5c631088f2..a1afcdbf1f77c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/function/Function.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.esql.core.expression.function; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; @@ -42,6 +43,11 @@ public Nullability nullable() { return Expressions.nullable(children()); } + /** Return true if this function can be executed under the provided {@link XPackLicenseState}, otherwise false.*/ + public boolean checkLicense(XPackLicenseState state) { + return true; + } + @Override public int hashCode() { return Objects.hash(getClass(), children()); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java index 29a567e83211d..b23593804f8fe 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/FullTextPredicate.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -23,10 +22,6 @@ public abstract class FullTextPredicate extends Expression { - public static List getNamedWriteables() { - return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, StringQueryPredicate.ENTRY); - } - public enum Operator { AND, OR; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java index d84a471815a9a..61b480968e974 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/plugin/EsqlCorePlugin.java @@ -12,7 +12,6 @@ import org.elasticsearch.plugins.Plugin; public class EsqlCorePlugin extends Plugin implements ExtensiblePlugin { - public static final FeatureFlag DATE_NANOS_FEATURE_FLAG = new FeatureFlag("esql_date_nanos"); public static final FeatureFlag 
SEMANTIC_TEXT_FEATURE_FLAG = new FeatureFlag("esql_semantic_text"); } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java index 347e6b43099fc..1c65dd386667f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/type/DataType.java @@ -209,7 +209,6 @@ public enum DataType { * check that sending them to a function produces a sane error message. */ public static final Map UNDER_CONSTRUCTION = Map.ofEntries( - Map.entry(DATE_NANOS, EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), Map.entry(SEMANTIC_TEXT, EsqlCorePlugin.SEMANTIC_TEXT_FEATURE_FLAG) ); @@ -591,6 +590,13 @@ public DataType noText() { return isString(this) ? KEYWORD : this; } + public boolean isDate() { + return switch (this) { + case DATETIME, DATE_NANOS -> true; + default -> false; + }; + } + /** * Named parameters with default values. It's just easier to do this with * a builder in java.... diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index d6d9c9bd96b70..f92c895cc5b7b 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -89,7 +89,6 @@ tasks.named("test").configure { } } File functionsFolder = file("build/testrun/test/temp/esql/functions") - File signatureFolder = file("build/testrun/test/temp/esql/functions/signature") File typesFolder = file("build/testrun/test/temp/esql/functions/types") def functionsDocFolder = file("${rootDir}/docs/reference/esql/functions") def effectiveProjectDir = projectDir diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 9e48bc13cdafa..1e06cf1ea4450 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; @@ -18,8 +17,6 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.BlockLoader; -import java.util.List; - /** * A Block is a columnar representation of homogenous data. It has a position (row) count, and * various data retrieval methods for accessing the underlying data that is stored at a given @@ -291,19 +288,6 @@ static Block[] buildAll(Block.Builder... 
builders) { } } - static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of( - IntBlock.ENTRY, - LongBlock.ENTRY, - FloatBlock.ENTRY, - DoubleBlock.ENTRY, - BytesRefBlock.ENTRY, - BooleanBlock.ENTRY, - ConstantNullBlock.ENTRY, - CompositeBlock.ENTRY - ); - } - /** * Serialization type for blocks: 0 and 1 replace false/true used in pre-8.14 */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java new file mode 100644 index 0000000000000..ff9139e57e52e --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockWritables.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class BlockWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + IntBlock.ENTRY, + LongBlock.ENTRY, + FloatBlock.ENTRY, + DoubleBlock.ENTRY, + BytesRefBlock.ENTRY, + BooleanBlock.ENTRY, + ConstantNullBlock.ENTRY, + CompositeBlock.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java index 9198de53b1e04..6dca94b9bc79a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/sort/BytesRefBucketedSort.java @@ -147,7 +147,7 @@ public void merge(int bucket, BytesRefBucketedSort other, int otherBucket) { // The value was never collected. 
return; } - other.checkInvariant(bucket); + other.checkInvariant(otherBucket); long otherStart = other.startIndex(otherBucket, otherRootIndex); long otherEnd = other.common.endIndex(otherRootIndex); // TODO: This can be improved for heapified buckets by making use of the heap structures diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java index 2093094fb8af5..0cd34d2ad4066 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperator.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -38,17 +39,25 @@ public final class EnrichQuerySourceOperator extends SourceOperator { private int queryPosition = -1; private final IndexReader indexReader; private final IndexSearcher searcher; + private final Warnings warnings; private final int maxPageSize; // using smaller pages enables quick cancellation and reduces sorting costs public static final int DEFAULT_MAX_PAGE_SIZE = 256; - public EnrichQuerySourceOperator(BlockFactory blockFactory, int maxPageSize, QueryList queryList, IndexReader indexReader) { + public EnrichQuerySourceOperator( + BlockFactory blockFactory, + int maxPageSize, + QueryList queryList, + IndexReader indexReader, + Warnings warnings + ) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.queryList = queryList; this.indexReader = indexReader; this.searcher = new IndexSearcher(indexReader); + this.warnings = warnings; } @Override @@ -73,12 +82,18 @@ public Page getOutput() { } int totalMatches = 0; do { - Query query = nextQuery(); - if (query == null) { - assert isFinished(); - break; + Query query; + try { + query = nextQuery(); + if (query == null) { + assert isFinished(); + break; + } + query = searcher.rewrite(new ConstantScoreQuery(query)); + } catch (Exception e) { + warnings.registerException(e); + continue; } - query = searcher.rewrite(new ConstantScoreQuery(query)); final var weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f); if (weight == null) { continue; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index e72c34fdb5f7a..d76e58d1c8a30 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -29,7 +29,7 @@ public abstract class SerializationTestCase extends ESTestCase { BigArrays bigArrays; protected BlockFactory blockFactory; - NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); + NamedWriteableRegistry registry = new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); @Before public final void newBlockFactory() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java index f857f50b2d30f..339c2bba2a734 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java @@ -363,6 +363,52 @@ public final void testMergeEmptyToEmpty() { } } + public final void testMergeOtherBigger() { + try (T sort = build(SortOrder.ASC, 3)) { + var values = threeSortedValues(); + + collect(sort, values.get(0), 0); + collect(sort, values.get(1), 0); + collect(sort, values.get(2), 0); + + try (T other = build(SortOrder.ASC, 3)) { + collect(other, values.get(0), 0); + collect(other, values.get(1), 1); + collect(other, values.get(2), 2); + + merge(sort, 0, other, 0); + merge(sort, 0, other, 1); + merge(sort, 0, other, 2); + } + + assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1))); + } + } + + public final void testMergeThisBigger() { + try (T sort = build(SortOrder.ASC, 3)) { + var values = threeSortedValues(); + + collect(sort, values.get(0), 0); + collect(sort, values.get(1), 1); + collect(sort, values.get(2), 2); + + try (T other = build(SortOrder.ASC, 3)) { + collect(other, values.get(0), 0); + collect(other, values.get(1), 0); + collect(other, values.get(2), 0); + + merge(sort, 0, other, 0); + merge(sort, 1, other, 0); + merge(sort, 2, other, 0); + } + + assertBlock(sort, 0, List.of(values.get(0), values.get(0), values.get(1))); + assertBlock(sort, 1, List.of(values.get(0), values.get(1), values.get(1))); + assertBlock(sort, 2, values); + } + } + protected void assertBlock(T sort, int groupId, List values) { var blockFactory = TestBlockFactory.getNonBreakingInstance(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 9e07f9c8f5faf..0b1ecce8c375b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; @@ -457,7 +457,7 @@ public void sendResponse(TransportResponse transportResponse) { private MockTransportService newTransportService() { List namedWriteables = new ArrayList<>(ClusterModule.getNamedWriteables()); - namedWriteables.addAll(Block.getNamedWriteables()); + namedWriteables.addAll(BlockWritables.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); MockTransportService service = MockTransportService.createNewService( Settings.EMPTY, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java index 6daace76dd8b8..2af52b6bab5a8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java @@ -32,6 +32,8 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -120,7 +122,8 @@ public void testQueries() throws Exception { // 3 -> [] -> [] // 4 -> [a1] -> [3] // 5 -> [] -> [] - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader); + var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, 128, queryList, reader, warnings); Page p0 = queryOperator.getOutput(); assertNotNull(p0); assertThat(p0.getPositionCount(), equalTo(6)); @@ -187,7 +190,8 @@ public void testRandomMatchQueries() throws Exception { MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid"); var queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms); int maxPageSize = between(1, 256); - EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader); + var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich"); + EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(blockFactory, maxPageSize, queryList, reader, warnings); Map> actualPositions = new HashMap<>(); while (queryOperator.isFinished() == false) { Page page = queryOperator.getOutput(); diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 60eecbb7658b7..5df85d1004dd1 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -47,6 +47,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1; import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC; @@ -124,6 +125,7 @@ protected void shouldSkipTest(String testName) throws IOException { assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName())); 
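The EnrichQuerySourceOperator change above makes per-position query failures non-fatal: a malformed enrich/lookup query is registered with the Warnings sink and that position is skipped, instead of failing the whole driver. A minimal wiring sketch in Java, using only names that appear in this patch; blockFactory, queryList and reader are assumed to be in scope, as in the tests:

// Collect warnings instead of throwing; IGNORE mode mirrors the test setup above.
Warnings warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "enrich lookup");
EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(
    blockFactory,
    EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, // smaller pages enable quick cancellation
    queryList,
    reader,
    warnings
);
Page page = queryOperator.getOutput(); // a failing query now surfaces as a warning, not an exception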
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName())); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName())); + assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP.capabilityName())); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index d124fdb5755c3..6f45c9d92fd12 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.index.mapper.BlockLoader; @@ -1456,16 +1457,12 @@ private static void index(String name, String... docs) throws IOException { } private static void createIndex(String name, CheckedConsumer mapping) throws IOException { - Request request = new Request("PUT", "/" + name); XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject(); - index.startObject("mappings"); mapping.accept(index); index.endObject(); - index.endObject(); String configStr = Strings.toString(index); logger.info("index: {} {}", name, configStr); - request.setJsonEntity(configStr); - client().performRequest(request); + ESRestTestCase.createIndex(name, Settings.EMPTY, configStr); } /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 478c68db68aa7..0d6659ad37a27 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -93,6 +93,8 @@ public class CsvTestsDataLoader { private static final TestsDataset BOOKS = new TestsDataset("books"); private static final TestsDataset SEMANTIC_TEXT = new TestsDataset("semantic_text").withInferenceEndpoint(true); + private static final String LOOKUP_INDEX_SUFFIX = "_lookup"; + public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), Map.entry(HOSTS.indexName, HOSTS), @@ -128,7 +130,9 @@ public class CsvTestsDataLoader { Map.entry(DISTANCES.indexName, DISTANCES), Map.entry(ADDRESSES.indexName, ADDRESSES), Map.entry(BOOKS.indexName, BOOKS), - Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT) + Map.entry(SEMANTIC_TEXT.indexName, SEMANTIC_TEXT), + // JOIN LOOKUP alias + Map.entry(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX, LANGUAGES.withIndex(LANGUAGES.indexName + LOOKUP_INDEX_SUFFIX)) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 2913401d8aab3..d6715a932c075 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -29,6 +29,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -342,7 +343,7 @@ public String toString() { public static final Configuration TEST_CFG = configuration(new QueryPragmas(Settings.EMPTY)); - public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry())); + public static final Verifier TEST_VERIFIER = new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)); private EsqlTestUtils() {} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index 3be3decaf351c..7bbf011176693 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -760,3 +760,19 @@ c:long |b:date 3 |2025-10-01T00:00:00.000Z 4 |2023-11-01T00:00:00.000Z ; + +bucketWithFilteredCountRefingBucket +required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| STATS c = COUNT(*) WHERE b > "1953-01-01T00:00:00.000Z" AND emp_no > 10020 BY b = BUCKET(birth_date, 1 year) +| SORT c, b +| LIMIT 4 +; + +c:long |b:date +0 |1952-01-01T00:00:00.000Z +0 |1953-01-01T00:00:00.000Z +0 |null +1 |1965-01-01T00:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/inlinestats.csv-spec-ignored diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec new file mode 100644 index 0000000000000..605bf78c20a32 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -0,0 +1,48 @@ +// +// CSV spec for LOOKUP JOIN command +// Reuses the sample dataset and commands from enrich.csv-spec +// + +basicOnTheDataNode +required_capability: join_lookup + +//TODO: this returns different results in CI then locally +// sometimes null, sometimes spanish (likely related to the execution order) +FROM employees +| EVAL language_code = languages +| LOOKUP JOIN languages_lookup ON language_code +| WHERE emp_no < 500 +| KEEP emp_no, language_name +| SORT emp_no +| LIMIT 1 +; + +emp_no:integer | language_name:keyword +//10091 | Spanish +; + +basicRow-Ignore +required_capability: join_lookup + +ROW language_code = 1 +| LOOKUP JOIN languages_lookup ON language_code +; + +language_code:keyword | language_name:keyword +1 | English +; + +basicOnTheCoordinator +required_capability: join_lookup + +FROM employees +| SORT emp_no +| LIMIT 1 +| EVAL language_code = languages +| LOOKUP JOIN 
languages_lookup ON language_code +| KEEP emp_no, language_name +; + +emp_no:integer | language_name:keyword +10001 | French +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored similarity index 80% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored index 9cf96f7c0b6de..685e3ab2778e1 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup.csv-spec-ignored @@ -4,7 +4,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -19,7 +19,7 @@ emp_no:integer | languages:integer | lang_name:keyword keywordByMvIntAndQuotedSource required_capability: lookup_v4 ROW int=[1, 2, 3] -| LOOKUP "int_number_names" ON int +| LOOKUP_🐔 "int_number_names" ON int ; int:integer | name:keyword @@ -29,7 +29,7 @@ int:integer | name:keyword keywordByDupeIntAndTripleQuotedSource required_capability: lookup_v4 ROW int=[1, 1, 1] -| LOOKUP """int_number_names""" ON int +| LOOKUP_🐔 """int_number_names""" ON int ; int:integer | name:keyword @@ -39,10 +39,10 @@ int:integer | name:keyword intByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP int_number_names ON name +| LOOKUP_🐔 int_number_names ON name ; -name:keyword | int:integer +name:keyword | int:integer two | 2 ; @@ -53,7 +53,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages.long AS long -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages, name AS lang_name | KEEP emp_no, languages, lang_name ; @@ -68,7 +68,7 @@ emp_no:integer | languages:long | lang_name:keyword longByKeyword required_capability: lookup_v4 ROW name="two" -| LOOKUP long_number_names ON name +| LOOKUP_🐔 long_number_names ON name ; name:keyword | long:long @@ -81,7 +81,7 @@ FROM employees | SORT emp_no | LIMIT 4 | RENAME height AS double -| LOOKUP double_number_names ON double +| LOOKUP_🐔 double_number_names ON double | RENAME double AS height, name AS height_name | KEEP emp_no, height, height_name ; @@ -96,7 +96,7 @@ emp_no:integer | height:double | height_name:keyword floatByKeyword required_capability: lookup_v4 ROW name="two point zero eight" -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:keyword | double:double @@ -106,7 +106,7 @@ two point zero eight | 2.08 floatByNullMissing required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names ON name +| LOOKUP_🐔 double_number_names ON name ; name:null | double:double @@ -116,7 +116,7 @@ name:null | double:double floatByNullMatching required_capability: lookup_v4 ROW name=null -| LOOKUP double_number_names_with_null ON name +| LOOKUP_🐔 double_number_names_with_null ON name ; name:null | double:double @@ -126,7 +126,7 @@ name:null | double:double intIntByKeywordKeyword required_capability: lookup_v4 ROW aa="foo", ab="zoo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -136,7 +136,7 @@ foo | zoo | 1 | -1 intIntByKeywordKeywordMissing required_capability: lookup_v4 ROW aa="foo", ab="zoi" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ 
-146,7 +146,7 @@ foo | zoi | null | null intIntByKeywordKeywordSameValues required_capability: lookup_v4 ROW aa="foo", ab="foo" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -156,7 +156,7 @@ foo | foo | 2 | -2 intIntByKeywordKeywordSameValuesMissing required_capability: lookup_v4 ROW aa="bar", ab="bar" -| LOOKUP big ON aa, ab +| LOOKUP_🐔 big ON aa, ab ; aa:keyword | ab:keyword | na:integer | nb:integer @@ -168,7 +168,7 @@ lookupBeforeStats-Ignore required_capability: lookup_v4 FROM employees | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | STATS height=ROUND(AVG(height), 3) BY languages | SORT height ASC; @@ -178,7 +178,7 @@ height:double | languages:keyword 1.732 | one 1.762 | two 1.764 | three - 1.809 | null + 1.809 | null 1.847 | five ; @@ -186,14 +186,14 @@ lookupAfterStats required_capability: lookup_v4 FROM employees | STATS int=TO_INT(AVG(height)) -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | KEEP name; name:keyword two ; -// Makes sure the LOOKUP squashes previous names +// Makes sure the LOOKUP_🐔 squashes previous names doesNotDuplicateNames required_capability: lookup_v4 FROM employees @@ -201,7 +201,7 @@ FROM employees | LIMIT 4 | RENAME languages.long AS long | EVAL name = CONCAT(first_name, " ", last_name) -| LOOKUP long_number_names ON long +| LOOKUP_🐔 long_number_names ON long | RENAME long AS languages | KEEP emp_no, languages, name ; @@ -219,7 +219,7 @@ required_capability: lookup_v4 FROM employees | WHERE emp_no < 10005 | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC @@ -238,7 +238,7 @@ FROM employees | WHERE emp_no < 10005 | SORT languages ASC, emp_no ASC | RENAME languages AS int -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no ; @@ -256,7 +256,7 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", int = emp_no - 10000, name = "name", right = "right" -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; emp_no:integer | left:keyword | int:integer | right:keyword | name:keyword @@ -269,65 +269,57 @@ FROM employees | KEEP emp_no | WHERE emp_no == 10001 | EVAL left = "left", nb = -10011+emp_no, na = "na", middle = "middle", ab = "ab", aa = "bar", right = "right" -| LOOKUP big ON aa, nb +| LOOKUP_🐔 big ON aa, nb ; -emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer +emp_no:integer | left:keyword | nb:integer | middle:keyword | aa:keyword | right:keyword | ab:keyword | na:integer 10001 | left | -10 | middle | bar | right | zop | 10 ; // -// Make sure that the new LOOKUP syntax doesn't clash with any existing things -// named "lookup" +// Make sure that the new LOOKUP_🐔 syntax doesn't clash with any existing things +// named "lookup_🐔" // -rowNamedLookup -required_capability: lookup_v4 -ROW lookup = "a" -; - -lookup:keyword - a -; rowNamedLOOKUP required_capability: lookup_v4 -ROW LOOKUP = "a" +ROW lookup_🐔 = "a" ; -LOOKUP:keyword +lookup_🐔:keyword a ; evalNamedLookup required_capability: lookup_v4 -ROW a = "a" | EVAL lookup = CONCAT(a, "1") +ROW a = "a" | EVAL lookup_🐔 = CONCAT(a, "1") ; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword a | a1 ; dissectNamedLookup required_capability: lookup_v4 -row a 
= "foo bar" | dissect a "foo %{lookup}"; +row a = "foo bar" | dissect a "foo %{lookup_🐔}"; -a:keyword | lookup:keyword +a:keyword | lookup_🐔:keyword foo bar | bar ; renameIntoLookup required_capability: lookup_v4 -row a = "foo bar" | RENAME a AS lookup; +row a = "foo bar" | RENAME a AS lookup_🐔; -lookup:keyword +lookup_🐔:keyword foo bar ; sortOnLookup required_capability: lookup_v4 -ROW lookup = "a" | SORT lookup +ROW lookup_🐔 = "a" | SORT lookup_🐔 ; -lookup:keyword +lookup_🐔:keyword a ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 7a046786a4f19..cba5ace0dfe86 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2642,6 +2642,26 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |1 |5 |5 |21 ; +simpleCountOnFieldWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(emp_no) where emp_no < 10042 +; + +c1:long +41 +; + +simpleCountOnStarWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(*) where emp_no < 10042 +; + +c1:long +41 +; + commonFilterExtractionWithAliasing required_capability: per_agg_filtering from employees diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index a2fd3f3d5e0da..af987b13acc82 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -718,6 +718,7 @@ count:long | @timestamp:date multiIndexTsNanosToDatetimeStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATETIME(@timestamp)) @@ -734,6 +735,8 @@ count:long | @timestamp:date Multi Index millis to nanos stats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos +required_capability: date_trunc_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATE_NANOS(@timestamp)) @@ -752,6 +755,7 @@ multiIndexTsLongStatsDrop required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -772,6 +776,7 @@ multiIndexTsLongStatsInline2 required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -915,6 +920,7 @@ multiIndexIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -956,6 +962,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp 
= TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -998,6 +1005,7 @@ multiIndexIpStringTsLongRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1039,6 +1047,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1081,6 +1090,7 @@ multiIndexIpStringTsLongRenameToString required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_STRING(TO_DATETIME(@timestamp)), host_ip = TO_STRING(TO_IP(client_ip)) @@ -1123,6 +1133,7 @@ multiIndexWhereIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1139,6 +1150,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1155,6 +1167,7 @@ multiIndexWhereIpStringLikeTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1171,6 +1184,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringLikeTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" 
@@ -1187,6 +1201,7 @@ multiIndexMultiColumnTypesRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1205,6 +1220,7 @@ multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1224,6 +1240,7 @@ multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1515,7 +1532,7 @@ FROM sample_data, sample_data_ts_long null | 172.21.0.5 | 1232382 | Disconnected | 8268153 ; -multiIndexIndirectUseOfUnionTypesInLookup +multiIndexIndirectUseOfUnionTypesInLookup-Ignore // TODO: `union_types` is required only because this makes the test skip in the csv tests; better solution: // make the csv tests work with multiple indices. required_capability: union_types @@ -1524,7 +1541,7 @@ FROM sample_data, sample_data_ts_long | SORT client_ip ASC | LIMIT 1 | EVAL int = (event_duration - 1232380)::integer -| LOOKUP int_number_names ON int +| LOOKUP_🐔 int_number_names ON int ; @timestamp:null | client_ip:ip | event_duration:long | message:keyword | int:integer | name:keyword diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java new file mode 100644 index 0000000000000..3a69983a0d86e --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Make sure the failures on the data node come back as failures over the wire. 
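+ * The failure is injected through a script plugin (the "fail" lang below) whose runtime field throws on every value load, so any query that touches the field fails as a whole.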
+ */ +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class); + } + + /** + * Use a runtime field that fails when loading field values to fail the entire query. + */ + public void testFailureLoadingFields() throws IOException { + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("fail_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "fail").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get(); + + int docCount = 100; + List<IndexRequestBuilder> docs = new ArrayList<>(docCount); + for (int d = 0; d < docCount; d++) { + docs.add(client().prepareIndex("ok").setSource("foo", d)); + } + docs.add(client().prepareIndex("fail").setSource("foo", 0)); + indexRandom(true, docs); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> run("FROM fail,ok | LIMIT 100").close()); + assertThat(e.getMessage(), equalTo("test failure")); + } + + public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "fail"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new ElasticsearchException("test failure"); + } + }; + } + }; + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java index cff9604053903..5c0c13b48df3b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java @@ -183,7 +183,8 @@ public void testLookupIndex() throws IOException { DataType.KEYWORD, "lookup", "data", - List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))) + List.of(new Alias(Source.EMPTY, "l", new ReferenceAttribute(Source.EMPTY, "l", DataType.LONG))), + Source.EMPTY ); DriverContext driverContext = driverContext(); try ( diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index b86c46fd3fa7a..3b647583f1129 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -14,9 +14,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.junit.Before; import java.util.List; @@ -32,12 +29,6 @@ public void setupIndex() { createAndPopulateIndex(); } - @Override - protected EsqlQueryResponse run(EsqlQueryRequest request) { - assumeTrue("match operator capability not available", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - return super.run(request); - } - public void testSimpleWhereMatch() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 6ec93d203d984..ef875d7ca01d8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -85,8 +85,15 @@ WHERE : 'where' -> pushMode(EXPRESSION_MODE); // main section while preserving alphabetical order: // MYCOMMAND : 'mycommand' -> ... DEV_INLINESTATS : {this.isDevVersion()}? 'inlinestats' -> pushMode(EXPRESSION_MODE); -DEV_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(LOOKUP_MODE); +DEV_LOOKUP : {this.isDevVersion()}? 'lookup_🐔' -> pushMode(LOOKUP_MODE); DEV_METRICS : {this.isDevVersion()}? 'metrics' -> pushMode(METRICS_MODE); +// list of all JOIN commands +DEV_JOIN : {this.isDevVersion()}? 'join' -> pushMode(JOIN_MODE); +DEV_JOIN_FULL : {this.isDevVersion()}? 'full' -> pushMode(JOIN_MODE); +DEV_JOIN_LEFT : {this.isDevVersion()}? 'left' -> pushMode(JOIN_MODE); +DEV_JOIN_RIGHT : {this.isDevVersion()}? 'right' -> pushMode(JOIN_MODE); +DEV_JOIN_LOOKUP : {this.isDevVersion()}? 'lookup' -> pushMode(JOIN_MODE); + // // Catch-all for unrecognized commands - don't define any beyond this line @@ -105,8 +112,6 @@ WS : [ \r\n\t]+ -> channel(HIDDEN) ; -COLON : ':'; - // // Expression - used by most command // @@ -177,6 +182,7 @@ AND : 'and'; ASC : 'asc'; ASSIGN : '='; CAST_OP : '::'; +COLON : ':'; COMMA : ','; DESC : 'desc'; DOT : '.'; @@ -209,7 +215,6 @@ MINUS : '-'; ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -EXPRESSION_COLON : {this.isDevVersion()}? 
COLON -> type(COLON); NESTED_WHERE : WHERE -> type(WHERE); @@ -543,6 +548,31 @@ LOOKUP_FIELD_WS : WS -> channel(HIDDEN) ; +// +// JOIN-related commands +// +mode JOIN_MODE; +JOIN_PIPE : PIPE -> type(PIPE), popMode; +JOIN_JOIN : DEV_JOIN -> type(DEV_JOIN); +JOIN_AS : AS -> type(AS); +JOIN_ON : ON -> type(ON), popMode, pushMode(EXPRESSION_MODE); +USING : 'USING' -> popMode, pushMode(EXPRESSION_MODE); + +JOIN_UNQUOTED_IDENTIFER: UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER); +JOIN_QUOTED_IDENTIFIER : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER); + +JOIN_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +JOIN_WS + : WS -> channel(HIDDEN) + ; + // // METRICS command // diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 3dd1a2c754038..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 
+EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 +SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 +'USING'=119 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 67f194a1bff64..f84cfe3060503 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -54,6 +54,7 @@ processingCommand // in development | {this.isDevVersion()}? inlinestatsCommand | {this.isDevVersion()}? lookupCommand + | {this.isDevVersion()}? joinCommand ; whereCommand @@ -68,7 +69,7 @@ booleanExpression | left=booleanExpression operator=OR right=booleanExpression #logicalBinary | valueExpression (NOT)? IN LP valueExpression (COMMA valueExpression)* RP #logicalIn | valueExpression IS NOT? NULL #isNull - | {this.isDevVersion()}? matchBooleanExpression #matchExpression + | matchBooleanExpression #matchExpression ; regexBooleanExpression @@ -322,3 +323,19 @@ lookupCommand inlinestatsCommand : DEV_INLINESTATS stats=aggFields (BY grouping=fields)? ; + +joinCommand + : type=(DEV_JOIN_LOOKUP | DEV_JOIN_LEFT | DEV_JOIN_RIGHT)? DEV_JOIN joinTarget joinCondition + ; + +joinTarget + : index=identifier (AS alias=identifier)? 
+ ; + +joinCondition + : ON joinPredicate (COMMA joinPredicate)* + ; + +joinPredicate + : valueExpression + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 3dd1a2c754038..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -17,106 +17,115 @@ WHERE=16 DEV_INLINESTATS=17 DEV_LOOKUP=18 DEV_METRICS=19 -UNKNOWN_CMD=20 -LINE_COMMENT=21 -MULTILINE_COMMENT=22 -WS=23 -COLON=24 -PIPE=25 -QUOTED_STRING=26 -INTEGER_LITERAL=27 -DECIMAL_LITERAL=28 -BY=29 -AND=30 -ASC=31 -ASSIGN=32 -CAST_OP=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -IN=39 -IS=40 -LAST=41 -LIKE=42 -LP=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -NAMED_OR_POSITIONAL_PARAM=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -EXPLAIN_WS=72 -EXPLAIN_LINE_COMMENT=73 -EXPLAIN_MULTILINE_COMMENT=74 -METADATA=75 -UNQUOTED_SOURCE=76 -FROM_LINE_COMMENT=77 -FROM_MULTILINE_COMMENT=78 -FROM_WS=79 -ID_PATTERN=80 -PROJECT_LINE_COMMENT=81 -PROJECT_MULTILINE_COMMENT=82 -PROJECT_WS=83 -AS=84 -RENAME_LINE_COMMENT=85 -RENAME_MULTILINE_COMMENT=86 -RENAME_WS=87 -ON=88 -WITH=89 -ENRICH_POLICY_NAME=90 -ENRICH_LINE_COMMENT=91 -ENRICH_MULTILINE_COMMENT=92 -ENRICH_WS=93 -ENRICH_FIELD_LINE_COMMENT=94 -ENRICH_FIELD_MULTILINE_COMMENT=95 -ENRICH_FIELD_WS=96 -MVEXPAND_LINE_COMMENT=97 -MVEXPAND_MULTILINE_COMMENT=98 -MVEXPAND_WS=99 -INFO=100 -SHOW_LINE_COMMENT=101 -SHOW_MULTILINE_COMMENT=102 -SHOW_WS=103 -SETTING=104 -SETTING_LINE_COMMENT=105 -SETTTING_MULTILINE_COMMENT=106 -SETTING_WS=107 -LOOKUP_LINE_COMMENT=108 -LOOKUP_MULTILINE_COMMENT=109 -LOOKUP_WS=110 -LOOKUP_FIELD_LINE_COMMENT=111 -LOOKUP_FIELD_MULTILINE_COMMENT=112 -LOOKUP_FIELD_WS=113 -METRICS_LINE_COMMENT=114 -METRICS_MULTILINE_COMMENT=115 -METRICS_WS=116 -CLOSING_METRICS_LINE_COMMENT=117 -CLOSING_METRICS_MULTILINE_COMMENT=118 -CLOSING_METRICS_WS=119 +DEV_JOIN=20 +DEV_JOIN_FULL=21 +DEV_JOIN_LEFT=22 +DEV_JOIN_RIGHT=23 +DEV_JOIN_LOOKUP=24 +UNKNOWN_CMD=25 +LINE_COMMENT=26 +MULTILINE_COMMENT=27 +WS=28 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 +COMMA=39 +DESC=40 +DOT=41 +FALSE=42 +FIRST=43 +IN=44 +IS=45 +LAST=46 +LIKE=47 +LP=48 +NOT=49 +NULL=50 +NULLS=51 +OR=52 +PARAM=53 +RLIKE=54 +RP=55 +TRUE=56 +EQ=57 +CIEQ=58 +NEQ=59 +LT=60 +LTE=61 +GT=62 +GTE=63 +PLUS=64 +MINUS=65 +ASTERISK=66 +SLASH=67 +PERCENT=68 +NAMED_OR_POSITIONAL_PARAM=69 +OPENING_BRACKET=70 +CLOSING_BRACKET=71 +UNQUOTED_IDENTIFIER=72 +QUOTED_IDENTIFIER=73 +EXPR_LINE_COMMENT=74 +EXPR_MULTILINE_COMMENT=75 +EXPR_WS=76 +EXPLAIN_WS=77 +EXPLAIN_LINE_COMMENT=78 +EXPLAIN_MULTILINE_COMMENT=79 +METADATA=80 +UNQUOTED_SOURCE=81 +FROM_LINE_COMMENT=82 +FROM_MULTILINE_COMMENT=83 +FROM_WS=84 +ID_PATTERN=85 +PROJECT_LINE_COMMENT=86 +PROJECT_MULTILINE_COMMENT=87 +PROJECT_WS=88 +AS=89 +RENAME_LINE_COMMENT=90 +RENAME_MULTILINE_COMMENT=91 +RENAME_WS=92 +ON=93 +WITH=94 +ENRICH_POLICY_NAME=95 +ENRICH_LINE_COMMENT=96 +ENRICH_MULTILINE_COMMENT=97 +ENRICH_WS=98 +ENRICH_FIELD_LINE_COMMENT=99 +ENRICH_FIELD_MULTILINE_COMMENT=100 +ENRICH_FIELD_WS=101 +MVEXPAND_LINE_COMMENT=102 +MVEXPAND_MULTILINE_COMMENT=103 +MVEXPAND_WS=104 +INFO=105 +SHOW_LINE_COMMENT=106 +SHOW_MULTILINE_COMMENT=107 
+SHOW_WS=108 +SETTING=109 +SETTING_LINE_COMMENT=110 +SETTTING_MULTILINE_COMMENT=111 +SETTING_WS=112 +LOOKUP_LINE_COMMENT=113 +LOOKUP_MULTILINE_COMMENT=114 +LOOKUP_WS=115 +LOOKUP_FIELD_LINE_COMMENT=116 +LOOKUP_FIELD_MULTILINE_COMMENT=117 +LOOKUP_FIELD_WS=118 +USING=119 +JOIN_LINE_COMMENT=120 +JOIN_MULTILINE_COMMENT=121 +JOIN_WS=122 +METRICS_LINE_COMMENT=123 +METRICS_MULTILINE_COMMENT=124 +METRICS_WS=125 +CLOSING_METRICS_LINE_COMMENT=126 +CLOSING_METRICS_MULTILINE_COMMENT=127 +CLOSING_METRICS_WS=128 'dissect'=1 'drop'=2 'enrich'=3 @@ -133,46 +142,47 @@ CLOSING_METRICS_WS=119 'sort'=14 'stats'=15 'where'=16 -':'=24 -'|'=25 -'by'=29 -'and'=30 -'asc'=31 -'='=32 -'::'=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'in'=39 -'is'=40 -'last'=41 -'like'=42 -'('=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=66 -'metadata'=75 -'as'=84 -'on'=88 -'with'=89 -'info'=100 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 +','=39 +'desc'=40 +'.'=41 +'false'=42 +'first'=43 +'in'=44 +'is'=45 +'last'=46 +'like'=47 +'('=48 +'not'=49 +'null'=50 +'nulls'=51 +'or'=52 +'?'=53 +'rlike'=54 +')'=55 +'true'=56 +'=='=57 +'=~'=58 +'!='=59 +'<'=60 +'<='=61 +'>'=62 +'>='=63 +'+'=64 +'-'=65 +'*'=66 +'/'=67 +'%'=68 +']'=71 +'metadata'=80 +'as'=89 +'on'=93 +'with'=94 +'info'=105 +'USING'=119 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index d2bee9c67af5b..c5d3ee29d0bda 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -278,6 +278,11 @@ public enum Cap { */ RANGEQUERY_FOR_DATETIME, + /** + * Enforce strict type checking on ENRICH range types, and warnings for KEYWORD parsing at runtime. Done in #115091. + */ + ENRICH_STRICT_RANGE_TYPES, + /** * Fix for non-unique attribute names in ROW and logical plans. * https://github.com/elastic/elasticsearch/issues/110541 @@ -302,7 +307,7 @@ public enum Cap { /** * Support for match operator as a colon. Previous support for match operator as MATCH has been removed */ - MATCH_OPERATOR_COLON(Build.current().isSnapshot()), + MATCH_OPERATOR_COLON, /** * Removing support for the {@code META} keyword. 
@@ -317,32 +322,32 @@ public enum Cap { /** * Support for nanosecond dates as a data type */ - DATE_NANOS_TYPE(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_TYPE(), /** * Support for to_date_nanos function */ - TO_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + TO_DATE_NANOS(), /** * Support for date nanos type in binary comparisons */ - DATE_NANOS_BINARY_COMPARISON(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_BINARY_COMPARISON(), /** * Support Least and Greatest functions on Date Nanos type */ - LEAST_GREATEST_FOR_DATENANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + LEAST_GREATEST_FOR_DATENANOS(), /** * Support for date_trunc function on date nanos type */ - DATE_TRUNC_DATE_NANOS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_TRUNC_DATE_NANOS(), /** * support aggregations on date nanos */ - DATE_NANOS_AGGREGATIONS(EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG), + DATE_NANOS_AGGREGATIONS(), /** * Support for datetime in least and greatest functions @@ -420,6 +425,12 @@ public enum Cap { */ SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN, + /** + * Fix {@code SORT} when the {@code _source} field is not a sort key but + * is being returned. + */ + SORT_RETURNING_SOURCE_OK, + /** * Allow filter per individual aggregation. */ @@ -489,7 +500,12 @@ public enum Cap { /** * Support implicit casting from string literal to DATE_PERIOD or TIME_DURATION. */ - IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT; + IMPLICIT_CASTING_STRING_LITERAL_TO_TEMPORAL_AMOUNT, + + /** + * LOOKUP JOIN + */ + JOIN_LOOKUP(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 562d42a94483f..7ad4c3d3e644d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.logging.HeaderWarning; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.Logger; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.Column; @@ -20,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.EmptyAttribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; @@ -75,6 +77,12 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import 
org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -98,12 +106,14 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.core.enrich.EnrichPolicy.GEO_MATCH_TYPE; @@ -189,6 +199,9 @@ private static class ResolveTable extends ParameterizedAnalyzerRule maybeResolveAttribute(ua, childrenOutput)); } @@ -588,6 +620,77 @@ private LogicalPlan resolveLookup(Lookup l, List<Attribute> childrenOutput) { return l; } + private Join resolveLookupJoin(LookupJoin join) { + JoinConfig config = join.config(); + // for now, support only (LEFT) USING clauses + JoinType type = config.type(); + // rewrite the join into an equi-join between the field with the same name between left and right + // per SQL standard, the USING columns are placed first in the output, followed by the rest of left, then right + if (type instanceof UsingJoinType using) { + List<Attribute> cols = using.columns(); + // the lookup cannot be resolved, bail out + if (Expressions.anyMatch(cols, c -> c instanceof UnresolvedAttribute ua && ua.customMessage())) { + return join; + } + + JoinType coreJoin = using.coreJoin(); + // verify the join type + if (coreJoin != JoinTypes.LEFT) { + String name = cols.get(0).name(); + UnresolvedAttribute errorAttribute = new UnresolvedAttribute( + join.source(), + name, + "Only LEFT join is supported with USING" + ); + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + // resolve the using columns against the left and the right side then assemble the new join config + List<Attribute> leftKeys = resolveUsingColumns(cols, join.left().output(), "left"); + List<Attribute> rightKeys = resolveUsingColumns(cols, join.right().output(), "right"); + List<Attribute> output = new ArrayList<>(join.left().output()); + // the order is stable (since the AttributeSet preserves the insertion order) + output.addAll(join.right().outputSet().subtract(new AttributeSet(rightKeys))); + + // update the config - pick the left keys as those in the output + type = new UsingJoinType(coreJoin, rightKeys); + config = new JoinConfig(type, leftKeys, leftKeys, rightKeys); + join = new LookupJoin(join.source(), join.left(), join.right(), config, output); + } + // everything else is unsupported for now + else { + UnresolvedAttribute errorAttribute = new UnresolvedAttribute(join.source(), "unsupported", "Unsupported join type"); + // add error message + return join.withConfig(new JoinConfig(type, singletonList(errorAttribute), emptyList(), emptyList())); + } + return join; + } + + private List<Attribute> resolveUsingColumns(List<Attribute> cols, List<Attribute> output, String side) { + List<Attribute> resolved = new ArrayList<>(cols.size()); + for (Attribute col : cols) { + if (col instanceof UnresolvedAttribute ua) { + Attribute resolvedCol = maybeResolveAttribute(ua, output); + if (resolvedCol instanceof UnresolvedAttribute ucol) { + String message = ua.unresolvedMessage(); + String match = "column [" + ucol.name() + "]"; + resolvedCol = ucol.withUnresolvedMessage(message.replace(match, match + " in " + side + " side of join")); + } + resolved.add(resolvedCol); + } + // columns are expected to be unresolved - if that's not the case return an error + else { +
return singletonList( + new UnresolvedAttribute( + col.source(), + col.name(), + "Surprised to discover column [" + col.name() + "] already resolved" + ) + ); + } + } + return resolved; + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List<Attribute> childrenOutput) { return maybeResolveAttribute(ua, childrenOutput, log); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java index 9d7c5e141a2b1..460d30618df79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/PreAnalyzer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; @@ -22,14 +23,16 @@ public class PreAnalyzer { public static class PreAnalysis { - public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList()); + public static final PreAnalysis EMPTY = new PreAnalysis(emptyList(), emptyList(), emptyList()); public final List<TableInfo> indices; public final List<Enrich> enriches; + public final List<TableInfo> lookupIndices; - public PreAnalysis(List<TableInfo> indices, List<Enrich> enriches) { + public PreAnalysis(List<TableInfo> indices, List<Enrich> enriches, List<TableInfo> lookupIndices) { this.indices = indices; this.enriches = enriches; + this.lookupIndices = lookupIndices; } } @@ -44,13 +47,17 @@ public PreAnalysis preAnalyze(LogicalPlan plan) { protected PreAnalysis doPreAnalyze(LogicalPlan plan) { List<TableInfo> indices = new ArrayList<>(); List<Enrich> unresolvedEnriches = new ArrayList<>(); + List<TableInfo> lookupIndices = new ArrayList<>(); - plan.forEachUp(UnresolvedRelation.class, p -> indices.add(new TableInfo(p.table()))); + plan.forEachUp(UnresolvedRelation.class, p -> { + List<TableInfo> list = p.indexMode() == IndexMode.LOOKUP ?
lookupIndices : indices; + list.add(new TableInfo(p.table())); + }); plan.forEachUp(Enrich.class, unresolvedEnriches::add); // mark plan as preAnalyzed (if it were marked, there would be no analysis) plan.forEachUp(LogicalPlan::setPreAnalyzed); - return new PreAnalysis(indices, unresolvedEnriches); + return new PreAnalysis(indices, unresolvedEnriches, lookupIndices); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d399c826e0bf2..694328e57b5ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.esql.analysis; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.capabilities.Unresolvable; @@ -52,6 +54,7 @@ import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.stats.FeatureMetric; import org.elasticsearch.xpack.esql.stats.Metrics; @@ -82,9 +85,11 @@ public class Verifier { private final Metrics metrics; + private final XPackLicenseState licenseState; - public Verifier(Metrics metrics) { + public Verifier(Metrics metrics, XPackLicenseState licenseState) { this.metrics = metrics; + this.licenseState = licenseState; } /** @@ -168,6 +173,20 @@ else if (p instanceof Lookup lookup) { else { lookup.matchFields().forEach(unresolvedExpressions); } + } else if (p instanceof LookupJoin lj) { + // expect right side to always be a lookup index + lj.right().forEachUp(EsRelation.class, r -> { + if (r.indexMode() != IndexMode.LOOKUP) { + failures.add( + fail( + r, + "LOOKUP JOIN right side [{}] must be a lookup index (index_mode=lookup, not [{}])", + r.index().name(), + r.indexMode().getName() + ) + ); + } + }); } else { @@ -201,6 +220,10 @@ else if (p instanceof Lookup lookup) { }); checkRemoteEnrich(plan, failures); + if (failures.isEmpty()) { + checkLicense(plan, licenseState, failures); + } + // gather metrics if (failures.isEmpty()) { gatherMetrics(plan, partialMetrics); @@ -488,7 +511,7 @@ private static void checkRow(LogicalPlan p, Set<Failure> failures) { if (p instanceof Row row) { row.fields().forEach(a -> { if (DataType.isRepresentable(a.dataType()) == false) { - failures.add(fail(a, "cannot use [{}] directly in a row assignment", a.child().sourceText())); + failures.add(fail(a.child(), "cannot use [{}] directly in a row assignment", a.child().sourceText())); } }); } @@ -546,6 +569,14 @@ private static void checkBinaryComparison(LogicalPlan p, Set<Failure> failures) }); } + private void checkLicense(LogicalPlan plan, XPackLicenseState licenseState, Set<Failure> failures) { + plan.forEachExpressionDown(Function.class, p -> { + if (p.checkLicense(licenseState) == false) { + failures.add(new Failure(p, "current license is non-compliant for function [" + p.sourceText() + "]")); + } + }); + } + private void gatherMetrics(LogicalPlan plan, BitSet b) { plan.forEachDown(p -> FeatureMetric.set(p, b)); for (int i = b.nextSetBit(0); i >= 0; i = b.nextSetBit(i + 1)) { diff --git
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index 286ddbaa29a5b..e52e9ae989a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -41,6 +41,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OutputOperator; +import org.elasticsearch.compute.operator.Warnings; import org.elasticsearch.compute.operator.lookup.EnrichQuerySourceOperator; import org.elasticsearch.compute.operator.lookup.MergePositionsOperator; import org.elasticsearch.compute.operator.lookup.QueryList; @@ -78,6 +79,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.EsPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -166,6 +168,10 @@ abstract class AbstractLookupService list releasables.add(mergePositionsOperator); SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); QueryList queryList = queryList(request, searchExecutionContext, inputBlock, request.inputDataType); + var warnings = Warnings.createWarnings( + DriverContext.WarningsMode.COLLECT, + request.source.source().getLineNumber(), + request.source.source().getColumnNumber(), + request.source.text() + ); var queryOperator = new EnrichQuerySourceOperator( driverContext.blockFactory(), EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, queryList, - searchExecutionContext.getIndexReader() + searchExecutionContext.getIndexReader(), + warnings ); releasables.add(queryOperator); var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, request.extractFields); @@ -447,13 +460,22 @@ abstract static class Request { final DataType inputDataType; final Page inputPage; final List<NamedExpression> extractFields; + final Source source; - Request(String sessionId, String index, DataType inputDataType, Page inputPage, List<NamedExpression> extractFields) { + Request( + String sessionId, + String index, + DataType inputDataType, + Page inputPage, + List<NamedExpression> extractFields, + Source source + ) { this.sessionId = sessionId; this.index = index; this.inputDataType = inputDataType; this.inputPage = inputPage; this.extractFields = extractFields; + this.source = source; } } @@ -467,6 +489,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans final DataType inputDataType; final Page inputPage; final List<NamedExpression> extractFields; + final Source source; // TODO: Remove this workaround once we have Block RefCount final Page toRelease; final RefCounted refs = AbstractRefCounted.of(this::releasePage); @@ -477,7 +500,8 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans DataType inputDataType, Page inputPage, Page toRelease, - List<NamedExpression> extractFields + List<NamedExpression> extractFields, + Source source ) { this.sessionId = sessionId; this.shardId = shardId; @@ -485,6 +509,7 @@ abstract static class TransportRequest extends org.elasticsearch.transport.Trans this.inputPage = inputPage; this.toRelease =
toRelease; this.extractFields = extractFields; + this.source = source; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java index 6e5845fae33b7..df608a04632a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupOperator.java @@ -16,9 +16,11 @@ import org.elasticsearch.compute.operator.AsyncOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.ResponseHeadersCollector; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -35,6 +37,8 @@ public final class EnrichLookupOperator extends AsyncOperator { private final String matchType; private final String matchField; private final List<NamedExpression> enrichFields; + private final ResponseHeadersCollector responseHeadersCollector; + private final Source source; private long totalTerms = 0L; public record Factory( @@ -47,7 +51,8 @@ public record Factory( String enrichIndex, String matchType, String matchField, - List<NamedExpression> enrichFields + List<NamedExpression> enrichFields, + Source source ) implements OperatorFactory { @Override public String describe() { @@ -75,7 +80,8 @@ public Operator get(DriverContext driverContext) { enrichIndex, matchType, matchField, - enrichFields + enrichFields, + source ); } } @@ -91,7 +97,8 @@ public EnrichLookupOperator( String enrichIndex, String matchType, String matchField, - List<NamedExpression> enrichFields + List<NamedExpression> enrichFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -103,6 +110,8 @@ public EnrichLookupOperator( this.matchType = matchType; this.matchField = matchField; this.enrichFields = enrichFields; + this.source = source; + this.responseHeadersCollector = new ResponseHeadersCollector(enrichLookupService.getThreadContext()); } @Override @@ -116,9 +125,14 @@ protected void performAsync(Page inputPage, ActionListener<Page> listener) { matchType, matchField, new Page(inputBlock), - enrichFields + enrichFields, + source + ); + enrichLookupService.lookupAsync( + request, + parentTask, + ActionListener.runBefore(listener.map(inputPage::appendPage), responseHeadersCollector::collect) ); - enrichLookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } @Override @@ -140,6 +154,7 @@ public String toString() { protected void doClose() { // TODO: Maybe create a sub-task as the parent task of all the lookup tasks // then cancel it when this operator terminates early (e.g., have enough result).
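+ // hand over the response headers gathered from the lookup requests before the operator goes away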
+ responseHeadersCollector.finish(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 2d85b46e33a8c..50a1ffce4841f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.lookup.QueryList; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.RangeFieldMapper; +import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchService; @@ -27,6 +29,7 @@ import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -71,13 +74,15 @@ protected TransportRequest transportRequest(EnrichLookupService.Request request, request.matchField, request.inputPage, null, - request.extractFields + request.extractFields, + request.source ); } @Override protected QueryList queryList(TransportRequest request, SearchExecutionContext context, Block inputBlock, DataType inputDataType) { MappedFieldType fieldType = context.getFieldType(request.matchField); + validateTypes(inputDataType, fieldType); return switch (request.matchType) { case "match", "range" -> termQueryList(fieldType, context, inputBlock, inputDataType); case "geo_match" -> QueryList.geoShapeQueryList(fieldType, context, inputBlock); @@ -85,6 +90,33 @@ protected QueryList queryList(TransportRequest request, SearchExecutionContext c }; } + private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) { + if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) { + // For range policy types, the ENRICH index field type will be one of a list of supported range types, + // which need to match the input data type (eg. ip-range -> ip, date-range -> date, etc.) + if (rangeTypesCompatible(rangeType.rangeType(), inputDataType) == false) { + throw new EsqlIllegalArgumentException( + "ENRICH range and input types are incompatible: range[" + rangeType.rangeType() + "], input[" + inputDataType + "]" + ); + } + } + // For match policies, the ENRICH index field will always be KEYWORD, and input type will be converted to KEYWORD. + // For geo_match, type validation is done earlier, in the Analyzer. 
+ } + + private static boolean rangeTypesCompatible(RangeType rangeType, DataType inputDataType) { + if (inputDataType.noText() == DataType.KEYWORD) { + // We allow runtime parsing of string types to numeric types + return true; + } + return switch (rangeType) { + case INTEGER, LONG -> inputDataType.isWholeNumber(); + case IP -> inputDataType == DataType.IP; + case DATE -> inputDataType.isDate(); + default -> rangeType.isNumeric() == inputDataType.isNumeric(); + }; + } + public static class Request extends AbstractLookupService.Request { private final String matchType; private final String matchField; @@ -96,9 +128,10 @@ public static class Request extends AbstractLookupService.Request { String matchType, String matchField, Page inputPage, - List<NamedExpression> extractFields + List<NamedExpression> extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -116,9 +149,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR String matchType, String matchField, Page inputPage, Page toRelease, - List<NamedExpression> extractFields + List<NamedExpression> extractFields, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchType = matchType; this.matchField = matchField; } @@ -138,6 +172,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro } PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List<NamedExpression> extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -146,7 +184,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro matchField, inputPage, inputPage, - extractFields + extractFields, + source ); result.setParentTask(parentTaskId); return result; @@ -165,6 +204,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeWriteable(inputPage); PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java index 836b400c54f8c..f09f7d0e23e7b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperator.java @@ -19,6 +19,7 @@ import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; @@ -36,7 +37,8 @@ public record Factory( DataType inputDataType, String lookupIndex, String matchField, - List<NamedExpression> loadFields + List<NamedExpression> loadFields, + Source source ) implements OperatorFactory { @Override public
String describe() { @@ -63,7 +65,8 @@ public Operator get(DriverContext driverContext) { inputDataType, lookupIndex, matchField, - loadFields + loadFields, + source ); } } @@ -76,6 +79,7 @@ public Operator get(DriverContext driverContext) { private final String lookupIndex; private final String matchField; private final List loadFields; + private final Source source; private long totalTerms = 0L; public LookupFromIndexOperator( @@ -88,7 +92,8 @@ public LookupFromIndexOperator( DataType inputDataType, String lookupIndex, String matchField, - List loadFields + List loadFields, + Source source ) { super(driverContext, maxOutstandingRequests); this.sessionId = sessionId; @@ -99,6 +104,7 @@ public LookupFromIndexOperator( this.lookupIndex = lookupIndex; this.matchField = matchField; this.loadFields = loadFields; + this.source = source; } @Override @@ -111,7 +117,8 @@ protected void performAsync(Page inputPage, ActionListener listener) { inputDataType, matchField, new Page(inputBlock), - loadFields + loadFields, + source ); lookupService.lookupAsync(request, parentTask, listener.map(inputPage::appendPage)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index ef204e88c234f..849e8e890e248 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.enrich; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,6 +26,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; @@ -68,7 +70,8 @@ protected TransportRequest transportRequest(LookupFromIndexService.Request reque request.inputPage, null, request.extractFields, - request.matchField + request.matchField, + request.source ); } @@ -87,9 +90,10 @@ public static class Request extends AbstractLookupService.Request { DataType inputDataType, String matchField, Page inputPage, - List extractFields + List extractFields, + Source source ) { - super(sessionId, index, inputDataType, inputPage, extractFields); + super(sessionId, index, inputDataType, inputPage, extractFields, source); this.matchField = matchField; } } @@ -104,9 +108,10 @@ protected static class TransportRequest extends AbstractLookupService.TransportR Page inputPage, Page toRelease, List extractFields, - String matchField + String matchField, + Source source ) { - super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields); + super(sessionId, shardId, inputDataType, inputPage, toRelease, extractFields, source); this.matchField = matchField; } @@ -122,6 +127,10 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = 
planIn.readNamedWriteableCollectionAsList(NamedExpression.class); String matchField = in.readString(); + var source = Source.EMPTY; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source = Source.readFrom(planIn); + } TransportRequest result = new TransportRequest( sessionId, shardId, @@ -129,7 +138,8 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro inputPage, inputPage, extractFields, - matchField + matchField, + source ); result.setParentTask(parentTaskId); return result; @@ -145,6 +155,9 @@ public void writeTo(StreamOutput out) throws IOException { PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); out.writeString(matchField); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + source.writeTo(planOut); + } } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index 816388193c5f6..c1269009c6a41 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; @@ -40,13 +41,13 @@ public class PlanExecutor { private final Verifier verifier; private final PlanningMetricsManager planningMetricsManager; - public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry) { + public PlanExecutor(IndexResolver indexResolver, MeterRegistry meterRegistry, XPackLicenseState licenseState) { this.indexResolver = indexResolver; this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); this.mapper = new Mapper(); this.metrics = new Metrics(functionRegistry); - this.verifier = new Verifier(metrics); + this.verifier = new Verifier(metrics, licenseState); this.planningMetricsManager = new PlanningMetricsManager(meterRegistry); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java new file mode 100644 index 0000000000000..7e2de0094c2ab --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/ExpressionWritables.java @@ -0,0 +1,213 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.ExpressionCoreWritables; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; +import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; +import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables; +import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvFunctionWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; +import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ExpressionWritables { + + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + + entries.addAll(allExpressions()); + entries.addAll(aggregates()); + entries.addAll(scalars()); + entries.addAll(spatials()); + entries.addAll(arithmetics()); + entries.addAll(binaryComparisons()); + entries.addAll(fullText()); + entries.addAll(unaryScalars()); + return entries; + } + + public static List attributes() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.attributes()); + entries.add(UnsupportedAttribute.ENTRY); + return entries; + } + + public static List namedExpressions() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.namedExpressions()); + entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); + return entries; + } + + public static List expressions() { + List entries = new ArrayList<>(); + entries.addAll(ExpressionCoreWritables.expressions()); + 
entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); + entries.add(Order.ENTRY); + return entries; + } + + public static List allExpressions() { + List entries = new ArrayList<>(); + entries.addAll(expressions()); + entries.addAll(namedExpressions()); + entries.addAll(attributes()); + return entries; + } + + public static List aggregates() { + return AggregateWritables.getNamedWriteables(); + } + + public static List scalars() { + return ScalarFunctionWritables.getNamedWriteables(); + } + + public static List unaryScalars() { + List entries = new ArrayList<>(); + entries.add(Abs.ENTRY); + entries.add(Acos.ENTRY); + entries.add(Asin.ENTRY); + entries.add(Atan.ENTRY); + entries.add(ByteLength.ENTRY); + entries.add(Cbrt.ENTRY); + entries.add(Ceil.ENTRY); + entries.add(Cos.ENTRY); + entries.add(Cosh.ENTRY); + entries.add(Exp.ENTRY); + entries.add(Floor.ENTRY); + entries.add(FromBase64.ENTRY); + entries.add(IsNotNull.ENTRY); + entries.add(IsNull.ENTRY); + entries.add(Length.ENTRY); + entries.add(Log10.ENTRY); + entries.add(LTrim.ENTRY); + entries.add(Neg.ENTRY); + entries.add(Not.ENTRY); + entries.add(RLike.ENTRY); + entries.add(RTrim.ENTRY); + entries.add(Signum.ENTRY); + entries.add(Sin.ENTRY); + entries.add(Sinh.ENTRY); + entries.add(Space.ENTRY); + entries.add(Sqrt.ENTRY); + entries.add(StX.ENTRY); + entries.add(StY.ENTRY); + entries.add(Tan.ENTRY); + entries.add(Tanh.ENTRY); + entries.add(ToBase64.ENTRY); + entries.add(ToBoolean.ENTRY); + entries.add(ToCartesianPoint.ENTRY); + entries.add(ToDatetime.ENTRY); + entries.add(ToDateNanos.ENTRY); + entries.add(ToDegrees.ENTRY); + entries.add(ToDouble.ENTRY); + entries.add(ToGeoShape.ENTRY); + entries.add(ToCartesianShape.ENTRY); + entries.add(ToGeoPoint.ENTRY); + entries.add(ToIP.ENTRY); + entries.add(ToInteger.ENTRY); + entries.add(ToLong.ENTRY); + entries.add(ToRadians.ENTRY); + entries.add(ToString.ENTRY); + entries.add(ToUnsignedLong.ENTRY); + entries.add(ToVersion.ENTRY); + entries.add(Trim.ENTRY); + entries.add(WildcardLike.ENTRY); + entries.add(Delay.ENTRY); + // mv functions + entries.addAll(MvFunctionWritables.getNamedWriteables()); + return entries; + } + + private static List spatials() { + return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY); + } + + private static List arithmetics() { + return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY); + } + + private static List binaryComparisons() { + return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY); + } + + private static List fullText() { + return FullTextWritables.getNamedWriteables(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index ca02441d2e1ad..eafb1fdbcbdcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -501,7 +501,9 @@ private static DataType getTargetType(String[] names) { types.add(type); } } + return types.stream() + .filter(DATA_TYPE_CASTING_PRIORITY::containsKey) .min((dt1, dt2) -> DATA_TYPE_CASTING_PRIORITY.get(dt1).compareTo(DATA_TYPE_CASTING_PRIORITY.get(dt2))) .orElse(UNSUPPORTED); } diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java index f7a74cc2ae93f..87efccfc90ab3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.aggregate; import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; @@ -30,27 +29,6 @@ * A type of {@code Function} that takes multiple values and extracts a single value out of them. For example, {@code AVG()}. */ public abstract class AggregateFunction extends Function { - public static List getNamedWriteables() { - return List.of( - Avg.ENTRY, - Count.ENTRY, - CountDistinct.ENTRY, - Max.ENTRY, - Median.ENTRY, - MedianAbsoluteDeviation.ENTRY, - Min.ENTRY, - Percentile.ENTRY, - Rate.ENTRY, - SpatialCentroid.ENTRY, - Sum.ENTRY, - Top.ENTRY, - Values.ENTRY, - // internal functions - ToPartial.ENTRY, - FromPartial.ENTRY, - WeightedAvg.ENTRY - ); - } private final Expression field; private final List parameters; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java new file mode 100644 index 0000000000000..b9cfd8892dd69 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/AggregateWritables.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.aggregate; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class AggregateWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + Avg.ENTRY, + Count.ENTRY, + CountDistinct.ENTRY, + Max.ENTRY, + Median.ENTRY, + MedianAbsoluteDeviation.ENTRY, + Min.ENTRY, + Percentile.ENTRY, + Rate.ENTRY, + SpatialCentroid.ENTRY, + Sum.ENTRY, + Top.ENTRY, + Values.ENTRY, + // internal functions + ToPartial.ENTRY, + FromPartial.ENTRY, + WeightedAvg.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java index fa8a9e7d8c837..3a0d616d407a3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Count.java @@ -58,7 +58,9 @@ public class Count extends AggregateFunction implements ToAggregator, SurrogateE ), @Example( description = "To count the same stream of data based on two different expressions " - + "use the pattern `COUNT(<expression> OR NULL)`", + + "use the pattern `COUNT(<expression> OR NULL)`. This builds on the three-valued logic " + + "({wikipedia}/Three-valued_logic[3VL]) of the language: `TRUE OR NULL` is `TRUE`, but `FALSE OR NULL` is `NULL`, " + + "plus the way COUNT handles `NULL`s: `COUNT(TRUE)` and `COUNT(FALSE)` are both 1, but `COUNT(NULL)` is 0.", file = "stats", tag = "count-or-null" ) } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java index ac2d4ff3cbc43..2165c3c7ad1a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Max.java @@ -55,7 +55,7 @@ public class Max extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The maximum value of a field.", isAggregation = true, examples = { @@ -72,7 +72,7 @@ public Max( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } + type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } ) Expression field ) { this(source, field, Literal.TRUE); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java index a5fc8196847b7..7d67868dd4134 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Min.java @@ -55,7 +55,7 @@ public class Min extends AggregateFunction implements ToAggregator, SurrogateExp ); @FunctionInfo( - returnType = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "long", "version" }, description = "The minimum value of a field.", isAggregation = true, examples = { @@ -72,7 +72,7 @@ public Min( Source source, @Param( name = "field", - type = { "boolean", "double", "integer", "long", "date", "ip", "keyword", "text", "long", "version" } + type = { "boolean", "double", "integer", "long", "date", "date_nanos", "ip", "keyword", "text", "long", "version" } ) Expression field ) { this(source, field, Literal.TRUE); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 111eab051719b..e7df990b20422 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -52,7 +52,7 @@ public class Values extends AggregateFunction implements ToAggregator { ); @FunctionInfo( - returnType = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "version" }, + returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, preview = true, description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. " + "If you need the values returned in order use <<esql-mv_sort>>.", @@ -70,7 +70,10 @@ public class Values extends AggregateFunction implements ToAggregator { ) public Values( Source source, - @Param(name = "field", type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "version" }) Expression v + @Param( + name = "field", + type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" } + ) Expression v ) { this(source, v, Literal.TRUE); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java index 4106df331d101..9f08401a42dd1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/package-info.java @@ -94,7 +94,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and add that constant to the list in - {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction#getNamedWriteables}. + {@link org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateWritables#getNamedWriteables}. *
* Do the same with {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 1a3667de992cd..9addf08e1b5f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.fulltext; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; @@ -29,9 +28,6 @@ * {@link org.elasticsearch.xpack.esql.optimizer.LocalPhysicalPlanOptimizer} to rewrite them into Lucene queries. */ public abstract class FullTextFunction extends Function { - public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return List.of(QueryString.ENTRY, Match.ENTRY); - } private final Expression query; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java new file mode 100644 index 0000000000000..7fdfb4b328869 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; +import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; + +import java.util.List; + +public class FullTextWritables { + + public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + return List.of( + MatchQueryPredicate.ENTRY, + MultiMatchQueryPredicate.ENTRY, + StringQueryPredicate.ENTRY, + QueryString.ENTRY, + Match.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java new file mode 100644 index 0000000000000..89b9036e97e3a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/GroupingWritables.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.grouping; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class GroupingWritables { + + public static List getNamedWriteables() { + return List.of(Bucket.ENTRY, Categorize.ENTRY); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java index 65985f234ac92..404ce7e3900c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/EsqlScalarFunction.java @@ -7,57 +7,11 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.FullTextPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; -import org.elasticsearch.xpack.esql.expression.function.grouping.Bucket; -import org.elasticsearch.xpack.esql.expression.function.grouping.Categorize; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; -import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; -import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; -import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; -import java.util.ArrayList; import java.util.List; /** @@ -71,56 +25,6 @@ *
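A note on the EsqlFunctionRegistry.getTargetType change several hunks up: the comparator passed to min() looks each candidate type up in DATA_TYPE_CASTING_PRIORITY, so a candidate missing from that map would make get(dt1) return null and compareTo throw a NullPointerException. Filtering to known keys first, with orElse(UNSUPPORTED) as the empty-stream fallback, removes that failure mode. The same shape, distilled over a toy priority table:

    import java.util.Comparator;
    import java.util.Map;
    import java.util.Set;

    class MinByPrioritySketch {
        static final Map<String, Integer> PRIORITY = Map.of("integer", 0, "long", 1, "double", 2);

        static String targetType(Set<String> candidates) {
            return candidates.stream()
                .filter(PRIORITY::containsKey)            // drop types the table doesn't rank
                .min(Comparator.comparing(PRIORITY::get)) // safe: every survivor has a priority
                .orElse("unsupported");                   // the UNSUPPORTED fallback
        }

        public static void main(String[] args) {
            System.out.println(targetType(Set.of("long", "double")));       // long
            System.out.println(targetType(Set.of("geo_point", "keyword"))); // unsupported, no NPE
        }
    }
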
    */ public abstract class EsqlScalarFunction extends ScalarFunction implements EvaluatorMapper { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - entries.add(And.ENTRY); - entries.add(Atan2.ENTRY); - entries.add(BitLength.ENTRY); - entries.add(Bucket.ENTRY); - entries.add(Case.ENTRY); - entries.add(Categorize.ENTRY); - entries.add(CIDRMatch.ENTRY); - entries.add(Coalesce.ENTRY); - entries.add(Concat.ENTRY); - entries.add(E.ENTRY); - entries.add(EndsWith.ENTRY); - entries.add(Greatest.ENTRY); - entries.add(Hypot.ENTRY); - entries.add(In.ENTRY); - entries.add(InsensitiveEquals.ENTRY); - entries.add(DateExtract.ENTRY); - entries.add(DateDiff.ENTRY); - entries.add(DateFormat.ENTRY); - entries.add(DateParse.ENTRY); - entries.add(DateTrunc.ENTRY); - entries.add(IpPrefix.ENTRY); - entries.add(Least.ENTRY); - entries.add(Left.ENTRY); - entries.add(Locate.ENTRY); - entries.add(Log.ENTRY); - entries.add(Now.ENTRY); - entries.add(Or.ENTRY); - entries.add(Pi.ENTRY); - entries.add(Pow.ENTRY); - entries.add(Right.ENTRY); - entries.add(Repeat.ENTRY); - entries.add(Replace.ENTRY); - entries.add(Reverse.ENTRY); - entries.add(Round.ENTRY); - entries.add(Split.ENTRY); - entries.add(Substring.ENTRY); - entries.add(StartsWith.ENTRY); - entries.add(Tau.ENTRY); - entries.add(ToLower.ENTRY); - entries.add(ToUpper.ENTRY); - entries.addAll(BinarySpatialFunction.getNamedWriteables()); - entries.addAll(EsqlArithmeticOperation.getNamedWriteables()); - entries.addAll(EsqlBinaryComparison.getNamedWriteables()); - entries.addAll(FullTextPredicate.getNamedWriteables()); - entries.addAll(UnaryScalarFunction.getNamedWriteables()); - return entries; - } - protected EsqlScalarFunction(Source source) { super(source); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java new file mode 100644 index 0000000000000..192ca6c43e57d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingWritables; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; +import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Hypot; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.BitLength; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Reverse; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Split; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; + +import java.util.ArrayList; +import java.util.List; + +public class ScalarFunctionWritables { + public static List getNamedWriteables() { + List entries = new ArrayList<>(); + entries.add(And.ENTRY); + entries.add(Atan2.ENTRY); + entries.add(BitLength.ENTRY); + entries.add(Case.ENTRY); + entries.add(CIDRMatch.ENTRY); 
+ entries.add(Coalesce.ENTRY); + entries.add(Concat.ENTRY); + entries.add(E.ENTRY); + entries.add(EndsWith.ENTRY); + entries.add(Greatest.ENTRY); + entries.add(Hypot.ENTRY); + entries.add(In.ENTRY); + entries.add(InsensitiveEquals.ENTRY); + entries.add(DateExtract.ENTRY); + entries.add(DateDiff.ENTRY); + entries.add(DateFormat.ENTRY); + entries.add(DateParse.ENTRY); + entries.add(DateTrunc.ENTRY); + entries.add(IpPrefix.ENTRY); + entries.add(Least.ENTRY); + entries.add(Left.ENTRY); + entries.add(Locate.ENTRY); + entries.add(Log.ENTRY); + entries.add(Now.ENTRY); + entries.add(Or.ENTRY); + entries.add(Pi.ENTRY); + entries.add(Pow.ENTRY); + entries.add(Right.ENTRY); + entries.add(Repeat.ENTRY); + entries.add(Replace.ENTRY); + entries.add(Reverse.ENTRY); + entries.add(Round.ENTRY); + entries.add(Split.ENTRY); + entries.add(Substring.ENTRY); + entries.add(StartsWith.ENTRY); + entries.add(Tau.ENTRY); + entries.add(ToLower.ENTRY); + entries.add(ToUpper.ENTRY); + + entries.addAll(GroupingWritables.getNamedWriteables()); + return entries; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java index 610fe1c5ea000..d2af110a5203f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java @@ -7,130 +7,20 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanos; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; -import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Exp; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan; -import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.ByteLength; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Space; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; -import org.elasticsearch.xpack.esql.expression.function.scalar.util.Delay; -import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; public abstract class UnaryScalarFunction extends EsqlScalarFunction { - public static List getNamedWriteables() { - List entries = new ArrayList<>(); - entries.add(Abs.ENTRY); - entries.add(Acos.ENTRY); - entries.add(Asin.ENTRY); - entries.add(Atan.ENTRY); - entries.add(ByteLength.ENTRY); - entries.add(Cbrt.ENTRY); - entries.add(Ceil.ENTRY); - entries.add(Cos.ENTRY); - entries.add(Cosh.ENTRY); - entries.add(Exp.ENTRY); - entries.add(Floor.ENTRY); - entries.add(FromBase64.ENTRY); - entries.add(IsNotNull.ENTRY); - entries.add(IsNull.ENTRY); - entries.add(Length.ENTRY); - entries.add(Log10.ENTRY); - entries.add(LTrim.ENTRY); - 
entries.add(Neg.ENTRY); - entries.add(Not.ENTRY); - entries.add(RLike.ENTRY); - entries.add(RTrim.ENTRY); - entries.add(Signum.ENTRY); - entries.add(Sin.ENTRY); - entries.add(Sinh.ENTRY); - entries.add(Space.ENTRY); - entries.add(Sqrt.ENTRY); - entries.add(StX.ENTRY); - entries.add(StY.ENTRY); - entries.add(Tan.ENTRY); - entries.add(Tanh.ENTRY); - entries.add(ToBase64.ENTRY); - entries.add(ToBoolean.ENTRY); - entries.add(ToCartesianPoint.ENTRY); - entries.add(ToDatetime.ENTRY); - entries.add(ToDateNanos.ENTRY); - entries.add(ToDegrees.ENTRY); - entries.add(ToDouble.ENTRY); - entries.add(ToGeoShape.ENTRY); - entries.add(ToCartesianShape.ENTRY); - entries.add(ToGeoPoint.ENTRY); - entries.add(ToIP.ENTRY); - entries.add(ToInteger.ENTRY); - entries.add(ToLong.ENTRY); - entries.add(ToRadians.ENTRY); - entries.add(ToString.ENTRY); - entries.add(ToUnsignedLong.ENTRY); - entries.add(ToVersion.ENTRY); - entries.add(Trim.ENTRY); - entries.add(WildcardLike.ENTRY); - entries.add(Delay.ENTRY); - entries.addAll(AbstractMultivalueFunction.getNamedWriteables()); - return entries; - } - protected final Expression field; public UnaryScalarFunction(Source source, Expression field) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 6e38d72500840..a35b67d7ac3fd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -62,7 +62,7 @@ public interface DateTruncFactoryProvider { protected static final ZoneId DEFAULT_TZ = ZoneOffset.UTC; @FunctionInfo( - returnType = "date", + returnType = { "date", "date_nanos" }, description = "Rounds down a date to the closest interval.", examples = { @Example(file = "date", tag = "docsDateTrunc"), @@ -83,7 +83,7 @@ public DateTrunc( type = { "date_period", "time_duration" }, description = "Interval; expressed using the timespan literal syntax." 
) Expression interval, - @Param(name = "date", type = { "date" }, description = "Date expression") Expression field + @Param(name = "date", type = { "date", "date_nanos" }, description = "Date expression") Expression field ) { super(source, List.of(interval, field)); this.interval = interval; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 6a3b58728b192..a32761cfd9948 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import java.io.IOException; -import java.util.List; /** * Base class for functions that reduce multivalued fields into single valued fields. @@ -32,27 +30,6 @@ *
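Background for the DateTrunc hunk above, which widens the function to date_nanos: once an interval is fixed, truncation at nanosecond precision is just floor division on nanos-since-epoch. The real implementation goes through Elasticsearch's Rounding machinery; this java.time sketch only illustrates the arithmetic:

    import java.time.Duration;
    import java.time.Instant;

    class TruncNanosSketch {
        // Round an instant down to a multiple of the interval, keeping nanosecond precision.
        static Instant truncate(Instant instant, Duration interval) {
            long nanos = instant.getEpochSecond() * 1_000_000_000L + instant.getNano();
            long step = interval.toNanos();
            long floored = Math.floorDiv(nanos, step) * step;
            return Instant.ofEpochSecond(Math.floorDiv(floored, 1_000_000_000L), Math.floorMod(floored, 1_000_000_000L));
        }

        public static void main(String[] args) {
            Instant t = Instant.parse("2024-05-06T12:34:56.123456789Z");
            System.out.println(truncate(t, Duration.ofHours(1)));     // 2024-05-06T12:00:00Z
            System.out.println(truncate(t, Duration.ofNanos(1_000))); // sub-millisecond precision survives
        }
    }
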
    */ public abstract class AbstractMultivalueFunction extends UnaryScalarFunction { - public static List getNamedWriteables() { - return List.of( - MvAppend.ENTRY, - MvAvg.ENTRY, - MvConcat.ENTRY, - MvCount.ENTRY, - MvDedupe.ENTRY, - MvFirst.ENTRY, - MvLast.ENTRY, - MvMax.ENTRY, - MvMedian.ENTRY, - MvMedianAbsoluteDeviation.ENTRY, - MvMin.ENTRY, - MvPercentile.ENTRY, - MvPSeriesWeightedSum.ENTRY, - MvSlice.ENTRY, - MvSort.ENTRY, - MvSum.ENTRY, - MvZip.ENTRY - ); - } protected AbstractMultivalueFunction(Source source, Expression field) { super(source, field); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java new file mode 100644 index 0000000000000..7f8fcd910ad6d --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFunctionWritables.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; + +import java.util.List; + +public class MvFunctionWritables { + public static List getNamedWriteables() { + return List.of( + MvAppend.ENTRY, + MvAvg.ENTRY, + MvConcat.ENTRY, + MvCount.ENTRY, + MvDedupe.ENTRY, + MvFirst.ENTRY, + MvLast.ENTRY, + MvMax.ENTRY, + MvMedian.ENTRY, + MvMedianAbsoluteDeviation.ENTRY, + MvMin.ENTRY, + MvPercentile.ENTRY, + MvPSeriesWeightedSum.ENTRY, + MvSlice.ENTRY, + MvSort.ENTRY, + MvSum.ENTRY, + MvZip.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java index 46538b77edc74..eccc7ee4672c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java @@ -112,7 +112,7 @@ * {@link org.elasticsearch.common.io.stream.NamedWriteable#writeTo}, * and a deserializing constructor. Then add an {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry} * constant and register it. To register it, look for a method like - * {@link org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction#getNamedWriteables()} + * {@link org.elasticsearch.xpack.esql.expression.function.scalar.ScalarFunctionWritables#getNamedWriteables()} * in your function's class hierarchy. Keep going up until you hit a function with that name. * Then add your new "ENTRY" constant to the list it returns. *
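The registration instructions above reflect this PR's broader refactor: getNamedWriteables() lists move off abstract base classes (AggregateFunction, EsqlScalarFunction, UnaryScalarFunction, AbstractMultivalueFunction, ...) into dedicated *Writables holders that are composed into a single list. A toy model of that composition, with a record and a name-to-reader map standing in for NamedWriteableRegistry.Entry and the registry itself:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Supplier;
    import java.util.stream.Collectors;

    class WritablesCompositionSketch {
        // Stand-in for NamedWriteableRegistry.Entry: a wire name plus a factory ("reader").
        record Entry(String name, Supplier<Object> reader) {}

        // Each *Writables holder contributes its own entries...
        static List<Entry> aggregates() { return List.of(new Entry("Max", Object::new), new Entry("Min", Object::new)); }
        static List<Entry> scalars()    { return List.of(new Entry("Round", Object::new), new Entry("Concat", Object::new)); }

        // ...and a top-level method composes them, like ExpressionWritables.getNamedWriteables().
        static List<Entry> getNamedWriteables() {
            List<Entry> entries = new ArrayList<>();
            entries.addAll(aggregates());
            entries.addAll(scalars());
            return entries;
        }

        public static void main(String[] args) {
            Map<String, Supplier<Object>> registry =
                getNamedWriteables().stream().collect(Collectors.toMap(Entry::name, Entry::reader));
            System.out.println(registry.keySet()); // duplicate wire names would fail fast in toMap
        }
    }
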
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java
index 8839244e6c601..4d08b0e9687ec 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java
@@ -8,7 +8,6 @@
 package org.elasticsearch.xpack.esql.expression.function.scalar.spatial;
 
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.geometry.Geometry;
@@ -22,7 +21,6 @@
 import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.Objects;
 
 import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST;
@@ -40,9 +38,6 @@
  * and of compatible CRS. For example geo_point and geo_shape can be compared, but not geo_point and cartesian_point.
  */
 public abstract class BinarySpatialFunction extends BinaryScalarFunction implements SpatialEvaluatorFactory.SpatialSourceResolution {
-    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(SpatialContains.ENTRY, SpatialDisjoint.ENTRY, SpatialIntersects.ENTRY, SpatialWithin.ENTRY, StDistance.ENTRY);
-    }
 
     private final SpatialTypeResolver spatialTypeResolver;
     private SpatialCrsType crsType;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java
index 62201bcfa858d..74394d796855f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
@@ -21,7 +20,6 @@
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 
 import java.io.IOException;
-import java.util.List;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
 import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE;
@@ -31,9 +29,6 @@
 import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType;
 
 public abstract class EsqlArithmeticOperation extends ArithmeticOperation implements EvaluatorMapper {
-    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(Add.ENTRY, Div.ENTRY, Mod.ENTRY, Mul.ENTRY, Sub.ENTRY);
-    }
 
     /**
      * The only role of this enum is to fit the super constructor that expects a BinaryOperation which is
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java
index db771a6354883..cbbf87fb6c4cb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java
@@ -7,7 +7,6 @@
 
 package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison;
 
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -25,7 +24,6 @@
 
 import java.io.IOException;
 import java.time.ZoneId;
-import java.util.List;
 import java.util.Map;
 
 import static org.elasticsearch.common.logging.LoggerMessageFormat.format;
@@ -33,9 +31,6 @@
 import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType;
 
 public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper {
-    public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(Equals.ENTRY, GreaterThan.ENTRY, GreaterThanOrEqual.ENTRY, LessThan.ENTRY, LessThanOrEqual.ENTRY, NotEquals.ENTRY);
-    }
 
     private final Map evaluatorMap;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
index 56ade3982e0d8..3ae7bd93092ef 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThan.java
@@ -53,12 +53,12 @@ public LessThan(
         Source source,
         @Param(
             name = "lhs",
-            type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
+            type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
             description = "An expression."
         ) Expression left,
         @Param(
             name = "rhs",
-            type = { "boolean", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
+            type = { "boolean", "date_nanos", "date", "double", "integer", "ip", "keyword", "long", "text", "unsigned_long", "version" },
             description = "An expression."
        ) Expression right
     ) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
index 1e1cc3b86a9d5..47e5b9acfbf9d 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamInput.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.xpack.esql.Column;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.NameId;
+import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.session.Configuration;
 
@@ -160,7 +161,7 @@ public Block[] readCachedBlockArray() throws IOException {
 
     @Override
     public String sourceText() {
-        return configuration.query();
+        return configuration == null ? Source.EMPTY.text() : configuration.query();
     }
 
     static void throwOnNullOptionalRead(Class<?> type) throws IOException {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java
index 153efa5b5c233..fb9d3f7e2f91e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineLimits.java
@@ -18,7 +18,7 @@
 import org.elasticsearch.xpack.esql.plan.logical.RegexExtract;
 import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
-import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
+import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
 
 public final class PushDownAndCombineLimits extends OptimizerRules.OptimizerRule<Limit> {
@@ -63,7 +63,7 @@ public LogicalPlan rule(Limit limit) {
                 }
             }
         } else if (limit.child() instanceof Join join) {
-            if (join.config().type() == JoinType.LEFT && join.right() instanceof LocalRelation) {
+            if (join.config().type() == JoinTypes.LEFT && join.right() instanceof LocalRelation) {
                 // This is a hash join from something like a lookup.
                return join.replaceChildren(limit.replaceChild(join.left()), join.right());
             }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java
index 1c20f765c6d51..ea9cd76bcb9bc 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/InsertFieldExtraction.java
@@ -17,10 +17,12 @@
 import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec;
 import org.elasticsearch.xpack.esql.plan.physical.LeafExec;
+import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.rule.Rule;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
@@ -93,8 +95,16 @@ public PhysicalPlan apply(PhysicalPlan plan) {
 
     private static Set<Attribute> missingAttributes(PhysicalPlan p) {
         var missing = new LinkedHashSet<Attribute>();
-        var input = p.inputSet();
+        var inputSet = p.inputSet();
+        // FIXME: the extractors should work on the right side as well
+        // skip the lookup join since the right side is always materialized and a projection
+        if (p instanceof LookupJoinExec join) {
+            // collect fields used in the join condition
+            return Collections.emptySet();
+        }
+
+        var input = inputSet;
         // collect field attributes used inside expressions
         p.forEachExpression(TypedAttribute.class, f -> {
             if (f instanceof FieldAttribute || f instanceof MetadataAttribute) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
index 8f9c5956dddd5..c83fdbe8847a9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp
@@ -23,7 +23,11 @@ null
 null
 null
 null
-':'
+null
+null
+null
+null
+null
 '|'
 null
 null
@@ -33,6 +37,7 @@ null
 'asc'
 '='
 '::'
+':'
 ','
 'desc'
 '.'
@@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,11 +150,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -155,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -262,11 +280,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE DIGIT LETTER @@ -286,6 +308,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -316,7 +339,6 @@ MINUS ASTERISK SLASH PERCENT -EXPRESSION_COLON NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -427,6 +449,16 @@ LOOKUP_FIELD_ID_PATTERN LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +JOIN_PIPE +JOIN_JOIN +JOIN_AS +JOIN_ON +USING +JOIN_UNQUOTED_IDENTIFER +JOIN_QUOTED_IDENTIFIER +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_PIPE METRICS_UNQUOTED_SOURCE METRICS_QUOTED_SOURCE @@ -461,8 +493,9 @@ SHOW_MODE SETTING_MODE LOOKUP_MODE LOOKUP_FIELD_MODE +JOIN_MODE METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 119, 1484, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 
2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 580, 8, 19, 11, 19, 12, 19, 581, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 590, 8, 20, 10, 20, 12, 20, 593, 9, 20, 1, 20, 3, 20, 596, 8, 20, 1, 20, 3, 20, 599, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 608, 8, 21, 10, 21, 12, 21, 611, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 619, 8, 22, 11, 22, 12, 22, 620, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 3, 29, 642, 8, 29, 1, 29, 4, 29, 645, 8, 29, 11, 29, 12, 29, 646, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 3, 32, 656, 8, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 663, 8, 34, 1, 35, 1, 35, 1, 35, 5, 35, 668, 8, 35, 10, 35, 12, 35, 671, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 679, 8, 35, 10, 35, 12, 35, 682, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 689, 8, 35, 1, 35, 3, 35, 692, 8, 35, 3, 35, 694, 8, 35, 1, 36, 4, 36, 697, 8, 36, 11, 36, 12, 36, 698, 1, 37, 4, 37, 702, 8, 37, 11, 37, 12, 37, 703, 1, 37, 1, 37, 5, 37, 708, 8, 37, 10, 37, 12, 37, 711, 9, 37, 1, 37, 1, 37, 4, 37, 715, 8, 37, 11, 37, 12, 37, 716, 1, 37, 4, 37, 720, 8, 37, 11, 37, 12, 37, 721, 1, 37, 1, 37, 5, 37, 726, 8, 37, 10, 37, 12, 37, 729, 9, 37, 3, 37, 731, 8, 37, 1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 737, 8, 37, 11, 37, 12, 37, 738, 1, 37, 1, 37, 3, 37, 743, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 
1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 3, 75, 874, 8, 75, 1, 75, 5, 75, 877, 8, 75, 10, 75, 12, 75, 880, 9, 75, 1, 75, 1, 75, 4, 75, 884, 8, 75, 11, 75, 12, 75, 885, 3, 75, 888, 8, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 902, 8, 78, 10, 78, 12, 78, 905, 9, 78, 1, 78, 1, 78, 3, 78, 909, 8, 78, 1, 78, 4, 78, 912, 8, 78, 11, 78, 12, 78, 913, 3, 78, 916, 8, 78, 1, 79, 1, 79, 4, 79, 920, 8, 79, 11, 79, 12, 79, 921, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 3, 96, 999, 8, 96, 1, 97, 4, 97, 1002, 8, 97, 11, 97, 12, 97, 1003, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 3, 108, 1053, 8, 108, 1, 109, 1, 109, 3, 109, 1057, 8, 109, 1, 109, 5, 109, 1060, 8, 109, 10, 109, 12, 109, 1063, 9, 109, 1, 109, 1, 109, 3, 109, 1067, 8, 109, 1, 109, 4, 109, 1070, 8, 109, 11, 109, 12, 109, 1071, 3, 109, 1074, 8, 109, 1, 110, 1, 110, 4, 110, 1078, 8, 110, 11, 110, 12, 110, 1079, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 130, 4, 130, 1165, 8, 130, 11, 130, 12, 130, 1166, 1, 130, 1, 130, 3, 130, 1171, 8, 130, 1, 130, 4, 130, 1174, 8, 130, 11, 130, 12, 130, 1175, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 
139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 4, 163, 1321, 8, 163, 11, 163, 12, 163, 1322, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 2, 609, 680, 0, 199, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 0, 165, 64, 167, 65, 169, 66, 171, 67, 173, 0, 175, 68, 177, 69, 179, 70, 181, 71, 183, 0, 185, 0, 187, 72, 189, 73, 191, 74, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 0, 205, 75, 207, 0, 209, 76, 211, 0, 213, 0, 215, 77, 217, 78, 219, 79, 221, 0, 223, 0, 225, 0, 227, 0, 229, 0, 231, 0, 233, 0, 235, 80, 237, 81, 239, 82, 241, 83, 243, 0, 245, 0, 247, 0, 249, 0, 251, 0, 253, 0, 255, 84, 257, 0, 259, 85, 261, 86, 263, 87, 265, 0, 267, 0, 269, 88, 271, 89, 273, 0, 275, 90, 277, 0, 279, 91, 281, 92, 283, 93, 285, 0, 287, 0, 289, 0, 291, 0, 293, 0, 295, 0, 297, 0, 299, 0, 301, 0, 303, 94, 305, 95, 307, 96, 309, 0, 311, 0, 313, 0, 315, 0, 317, 0, 319, 0, 321, 97, 323, 98, 325, 99, 327, 0, 329, 100, 331, 101, 333, 102, 335, 103, 
337, 0, 339, 0, 341, 104, 343, 105, 345, 106, 347, 107, 349, 0, 351, 0, 353, 0, 355, 0, 357, 0, 359, 0, 361, 0, 363, 108, 365, 109, 367, 110, 369, 0, 371, 0, 373, 0, 375, 0, 377, 111, 379, 112, 381, 113, 383, 0, 385, 0, 387, 0, 389, 114, 391, 115, 393, 116, 395, 0, 397, 0, 399, 117, 401, 118, 403, 119, 405, 0, 407, 0, 409, 0, 411, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1512, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 171, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 1, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 2, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 205, 1, 0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 3, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 225, 1, 0, 0, 0, 4, 227, 1, 0, 0, 0, 4, 229, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 4, 237, 1, 0, 0, 0, 4, 239, 1, 0, 0, 0, 4, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 5, 255, 1, 0, 0, 
0, 5, 257, 1, 0, 0, 0, 5, 259, 1, 0, 0, 0, 5, 261, 1, 0, 0, 0, 5, 263, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 275, 1, 0, 0, 0, 6, 277, 1, 0, 0, 0, 6, 279, 1, 0, 0, 0, 6, 281, 1, 0, 0, 0, 6, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 7, 295, 1, 0, 0, 0, 7, 297, 1, 0, 0, 0, 7, 299, 1, 0, 0, 0, 7, 301, 1, 0, 0, 0, 7, 303, 1, 0, 0, 0, 7, 305, 1, 0, 0, 0, 7, 307, 1, 0, 0, 0, 8, 309, 1, 0, 0, 0, 8, 311, 1, 0, 0, 0, 8, 313, 1, 0, 0, 0, 8, 315, 1, 0, 0, 0, 8, 317, 1, 0, 0, 0, 8, 319, 1, 0, 0, 0, 8, 321, 1, 0, 0, 0, 8, 323, 1, 0, 0, 0, 8, 325, 1, 0, 0, 0, 9, 327, 1, 0, 0, 0, 9, 329, 1, 0, 0, 0, 9, 331, 1, 0, 0, 0, 9, 333, 1, 0, 0, 0, 9, 335, 1, 0, 0, 0, 10, 337, 1, 0, 0, 0, 10, 339, 1, 0, 0, 0, 10, 341, 1, 0, 0, 0, 10, 343, 1, 0, 0, 0, 10, 345, 1, 0, 0, 0, 10, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 11, 351, 1, 0, 0, 0, 11, 353, 1, 0, 0, 0, 11, 355, 1, 0, 0, 0, 11, 357, 1, 0, 0, 0, 11, 359, 1, 0, 0, 0, 11, 361, 1, 0, 0, 0, 11, 363, 1, 0, 0, 0, 11, 365, 1, 0, 0, 0, 11, 367, 1, 0, 0, 0, 12, 369, 1, 0, 0, 0, 12, 371, 1, 0, 0, 0, 12, 373, 1, 0, 0, 0, 12, 375, 1, 0, 0, 0, 12, 377, 1, 0, 0, 0, 12, 379, 1, 0, 0, 0, 12, 381, 1, 0, 0, 0, 13, 383, 1, 0, 0, 0, 13, 385, 1, 0, 0, 0, 13, 387, 1, 0, 0, 0, 13, 389, 1, 0, 0, 0, 13, 391, 1, 0, 0, 0, 13, 393, 1, 0, 0, 0, 14, 395, 1, 0, 0, 0, 14, 397, 1, 0, 0, 0, 14, 399, 1, 0, 0, 0, 14, 401, 1, 0, 0, 0, 14, 403, 1, 0, 0, 0, 14, 405, 1, 0, 0, 0, 14, 407, 1, 0, 0, 0, 14, 409, 1, 0, 0, 0, 14, 411, 1, 0, 0, 0, 15, 413, 1, 0, 0, 0, 17, 423, 1, 0, 0, 0, 19, 430, 1, 0, 0, 0, 21, 439, 1, 0, 0, 0, 23, 446, 1, 0, 0, 0, 25, 456, 1, 0, 0, 0, 27, 463, 1, 0, 0, 0, 29, 470, 1, 0, 0, 0, 31, 477, 1, 0, 0, 0, 33, 485, 1, 0, 0, 0, 35, 497, 1, 0, 0, 0, 37, 506, 1, 0, 0, 0, 39, 512, 1, 0, 0, 0, 41, 519, 1, 0, 0, 0, 43, 526, 1, 0, 0, 0, 45, 534, 1, 0, 0, 0, 47, 542, 1, 0, 0, 0, 49, 557, 1, 0, 0, 0, 51, 567, 1, 0, 0, 0, 53, 579, 1, 0, 0, 0, 55, 585, 1, 0, 0, 0, 57, 602, 1, 0, 0, 0, 59, 618, 1, 0, 0, 0, 61, 624, 1, 0, 0, 0, 63, 626, 1, 0, 0, 0, 65, 630, 1, 0, 0, 0, 67, 632, 1, 0, 0, 0, 69, 634, 1, 0, 0, 0, 71, 637, 1, 0, 0, 0, 73, 639, 1, 0, 0, 0, 75, 648, 1, 0, 0, 0, 77, 650, 1, 0, 0, 0, 79, 655, 1, 0, 0, 0, 81, 657, 1, 0, 0, 0, 83, 662, 1, 0, 0, 0, 85, 693, 1, 0, 0, 0, 87, 696, 1, 0, 0, 0, 89, 742, 1, 0, 0, 0, 91, 744, 1, 0, 0, 0, 93, 747, 1, 0, 0, 0, 95, 751, 1, 0, 0, 0, 97, 755, 1, 0, 0, 0, 99, 757, 1, 0, 0, 0, 101, 760, 1, 0, 0, 0, 103, 762, 1, 0, 0, 0, 105, 767, 1, 0, 0, 0, 107, 769, 1, 0, 0, 0, 109, 775, 1, 0, 0, 0, 111, 781, 1, 0, 0, 0, 113, 784, 1, 0, 0, 0, 115, 787, 1, 0, 0, 0, 117, 792, 1, 0, 0, 0, 119, 797, 1, 0, 0, 0, 121, 799, 1, 0, 0, 0, 123, 803, 1, 0, 0, 0, 125, 808, 1, 0, 0, 0, 127, 814, 1, 0, 0, 0, 129, 817, 1, 0, 0, 0, 131, 819, 1, 0, 0, 0, 133, 825, 1, 0, 0, 0, 135, 827, 1, 0, 0, 0, 137, 832, 1, 0, 0, 0, 139, 835, 1, 0, 0, 0, 141, 838, 1, 0, 0, 0, 143, 841, 1, 0, 0, 0, 145, 843, 1, 0, 0, 0, 147, 846, 1, 0, 0, 0, 149, 848, 1, 0, 0, 0, 151, 851, 1, 0, 0, 0, 153, 853, 1, 0, 0, 0, 155, 855, 1, 0, 0, 0, 157, 857, 1, 0, 0, 0, 159, 859, 1, 0, 0, 0, 161, 861, 1, 0, 0, 0, 163, 866, 1, 0, 0, 0, 165, 887, 1, 0, 0, 0, 167, 889, 1, 0, 0, 0, 169, 894, 1, 0, 0, 0, 171, 915, 1, 0, 0, 0, 173, 917, 1, 0, 0, 0, 175, 925, 1, 0, 0, 0, 177, 927, 1, 0, 0, 0, 179, 931, 1, 0, 0, 0, 181, 935, 1, 0, 0, 0, 183, 939, 1, 0, 0, 0, 185, 944, 1, 0, 0, 0, 187, 949, 1, 0, 0, 0, 189, 953, 1, 0, 0, 0, 191, 957, 1, 0, 0, 0, 193, 961, 1, 0, 0, 0, 195, 966, 1, 0, 0, 0, 197, 970, 1, 
0, 0, 0, 199, 974, 1, 0, 0, 0, 201, 978, 1, 0, 0, 0, 203, 982, 1, 0, 0, 0, 205, 986, 1, 0, 0, 0, 207, 998, 1, 0, 0, 0, 209, 1001, 1, 0, 0, 0, 211, 1005, 1, 0, 0, 0, 213, 1009, 1, 0, 0, 0, 215, 1013, 1, 0, 0, 0, 217, 1017, 1, 0, 0, 0, 219, 1021, 1, 0, 0, 0, 221, 1025, 1, 0, 0, 0, 223, 1030, 1, 0, 0, 0, 225, 1034, 1, 0, 0, 0, 227, 1038, 1, 0, 0, 0, 229, 1043, 1, 0, 0, 0, 231, 1052, 1, 0, 0, 0, 233, 1073, 1, 0, 0, 0, 235, 1077, 1, 0, 0, 0, 237, 1081, 1, 0, 0, 0, 239, 1085, 1, 0, 0, 0, 241, 1089, 1, 0, 0, 0, 243, 1093, 1, 0, 0, 0, 245, 1098, 1, 0, 0, 0, 247, 1102, 1, 0, 0, 0, 249, 1106, 1, 0, 0, 0, 251, 1110, 1, 0, 0, 0, 253, 1115, 1, 0, 0, 0, 255, 1120, 1, 0, 0, 0, 257, 1123, 1, 0, 0, 0, 259, 1127, 1, 0, 0, 0, 261, 1131, 1, 0, 0, 0, 263, 1135, 1, 0, 0, 0, 265, 1139, 1, 0, 0, 0, 267, 1144, 1, 0, 0, 0, 269, 1149, 1, 0, 0, 0, 271, 1154, 1, 0, 0, 0, 273, 1161, 1, 0, 0, 0, 275, 1170, 1, 0, 0, 0, 277, 1177, 1, 0, 0, 0, 279, 1181, 1, 0, 0, 0, 281, 1185, 1, 0, 0, 0, 283, 1189, 1, 0, 0, 0, 285, 1193, 1, 0, 0, 0, 287, 1199, 1, 0, 0, 0, 289, 1203, 1, 0, 0, 0, 291, 1207, 1, 0, 0, 0, 293, 1211, 1, 0, 0, 0, 295, 1215, 1, 0, 0, 0, 297, 1219, 1, 0, 0, 0, 299, 1223, 1, 0, 0, 0, 301, 1228, 1, 0, 0, 0, 303, 1233, 1, 0, 0, 0, 305, 1237, 1, 0, 0, 0, 307, 1241, 1, 0, 0, 0, 309, 1245, 1, 0, 0, 0, 311, 1250, 1, 0, 0, 0, 313, 1254, 1, 0, 0, 0, 315, 1259, 1, 0, 0, 0, 317, 1264, 1, 0, 0, 0, 319, 1268, 1, 0, 0, 0, 321, 1272, 1, 0, 0, 0, 323, 1276, 1, 0, 0, 0, 325, 1280, 1, 0, 0, 0, 327, 1284, 1, 0, 0, 0, 329, 1289, 1, 0, 0, 0, 331, 1294, 1, 0, 0, 0, 333, 1298, 1, 0, 0, 0, 335, 1302, 1, 0, 0, 0, 337, 1306, 1, 0, 0, 0, 339, 1311, 1, 0, 0, 0, 341, 1320, 1, 0, 0, 0, 343, 1324, 1, 0, 0, 0, 345, 1328, 1, 0, 0, 0, 347, 1332, 1, 0, 0, 0, 349, 1336, 1, 0, 0, 0, 351, 1341, 1, 0, 0, 0, 353, 1345, 1, 0, 0, 0, 355, 1349, 1, 0, 0, 0, 357, 1353, 1, 0, 0, 0, 359, 1358, 1, 0, 0, 0, 361, 1362, 1, 0, 0, 0, 363, 1366, 1, 0, 0, 0, 365, 1370, 1, 0, 0, 0, 367, 1374, 1, 0, 0, 0, 369, 1378, 1, 0, 0, 0, 371, 1384, 1, 0, 0, 0, 373, 1388, 1, 0, 0, 0, 375, 1392, 1, 0, 0, 0, 377, 1396, 1, 0, 0, 0, 379, 1400, 1, 0, 0, 0, 381, 1404, 1, 0, 0, 0, 383, 1408, 1, 0, 0, 0, 385, 1413, 1, 0, 0, 0, 387, 1419, 1, 0, 0, 0, 389, 1425, 1, 0, 0, 0, 391, 1429, 1, 0, 0, 0, 393, 1433, 1, 0, 0, 0, 395, 1437, 1, 0, 0, 0, 397, 1443, 1, 0, 0, 0, 399, 1449, 1, 0, 0, 0, 401, 1453, 1, 0, 0, 0, 403, 1457, 1, 0, 0, 0, 405, 1461, 1, 0, 0, 0, 407, 1467, 1, 0, 0, 0, 409, 1473, 1, 0, 0, 0, 411, 1479, 1, 0, 0, 0, 413, 414, 7, 0, 0, 0, 414, 415, 7, 1, 0, 0, 415, 416, 7, 2, 0, 0, 416, 417, 7, 2, 0, 0, 417, 418, 7, 3, 0, 0, 418, 419, 7, 4, 0, 0, 419, 420, 7, 5, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 0, 0, 0, 422, 16, 1, 0, 0, 0, 423, 424, 7, 0, 0, 0, 424, 425, 7, 6, 0, 0, 425, 426, 7, 7, 0, 0, 426, 427, 7, 8, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 1, 1, 0, 429, 18, 1, 0, 0, 0, 430, 431, 7, 3, 0, 0, 431, 432, 7, 9, 0, 0, 432, 433, 7, 6, 0, 0, 433, 434, 7, 1, 0, 0, 434, 435, 7, 4, 0, 0, 435, 436, 7, 10, 0, 0, 436, 437, 1, 0, 0, 0, 437, 438, 6, 2, 2, 0, 438, 20, 1, 0, 0, 0, 439, 440, 7, 3, 0, 0, 440, 441, 7, 11, 0, 0, 441, 442, 7, 12, 0, 0, 442, 443, 7, 13, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 3, 0, 0, 445, 22, 1, 0, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 14, 0, 0, 448, 449, 7, 8, 0, 0, 449, 450, 7, 13, 0, 0, 450, 451, 7, 12, 0, 0, 451, 452, 7, 1, 0, 0, 452, 453, 7, 9, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 4, 3, 0, 455, 24, 1, 0, 0, 0, 456, 457, 7, 15, 0, 0, 457, 458, 7, 6, 0, 0, 458, 459, 7, 7, 0, 0, 459, 460, 7, 16, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 5, 4, 0, 462, 
26, 1, 0, 0, 0, 463, 464, 7, 17, 0, 0, 464, 465, 7, 6, 0, 0, 465, 466, 7, 7, 0, 0, 466, 467, 7, 18, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 6, 0, 0, 469, 28, 1, 0, 0, 0, 470, 471, 7, 18, 0, 0, 471, 472, 7, 3, 0, 0, 472, 473, 7, 3, 0, 0, 473, 474, 7, 8, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 7, 1, 0, 476, 30, 1, 0, 0, 0, 477, 478, 7, 13, 0, 0, 478, 479, 7, 1, 0, 0, 479, 480, 7, 16, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 5, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 8, 0, 0, 484, 32, 1, 0, 0, 0, 485, 486, 7, 16, 0, 0, 486, 487, 7, 11, 0, 0, 487, 488, 5, 95, 0, 0, 488, 489, 7, 3, 0, 0, 489, 490, 7, 14, 0, 0, 490, 491, 7, 8, 0, 0, 491, 492, 7, 12, 0, 0, 492, 493, 7, 9, 0, 0, 493, 494, 7, 0, 0, 0, 494, 495, 1, 0, 0, 0, 495, 496, 6, 9, 5, 0, 496, 34, 1, 0, 0, 0, 497, 498, 7, 6, 0, 0, 498, 499, 7, 3, 0, 0, 499, 500, 7, 9, 0, 0, 500, 501, 7, 12, 0, 0, 501, 502, 7, 16, 0, 0, 502, 503, 7, 3, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 10, 6, 0, 505, 36, 1, 0, 0, 0, 506, 507, 7, 6, 0, 0, 507, 508, 7, 7, 0, 0, 508, 509, 7, 19, 0, 0, 509, 510, 1, 0, 0, 0, 510, 511, 6, 11, 0, 0, 511, 38, 1, 0, 0, 0, 512, 513, 7, 2, 0, 0, 513, 514, 7, 10, 0, 0, 514, 515, 7, 7, 0, 0, 515, 516, 7, 19, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 12, 7, 0, 518, 40, 1, 0, 0, 0, 519, 520, 7, 2, 0, 0, 520, 521, 7, 7, 0, 0, 521, 522, 7, 6, 0, 0, 522, 523, 7, 5, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 13, 0, 0, 525, 42, 1, 0, 0, 0, 526, 527, 7, 2, 0, 0, 527, 528, 7, 5, 0, 0, 528, 529, 7, 12, 0, 0, 529, 530, 7, 5, 0, 0, 530, 531, 7, 2, 0, 0, 531, 532, 1, 0, 0, 0, 532, 533, 6, 14, 0, 0, 533, 44, 1, 0, 0, 0, 534, 535, 7, 19, 0, 0, 535, 536, 7, 10, 0, 0, 536, 537, 7, 3, 0, 0, 537, 538, 7, 6, 0, 0, 538, 539, 7, 3, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 15, 0, 0, 541, 46, 1, 0, 0, 0, 542, 543, 4, 16, 0, 0, 543, 544, 7, 1, 0, 0, 544, 545, 7, 9, 0, 0, 545, 546, 7, 13, 0, 0, 546, 547, 7, 1, 0, 0, 547, 548, 7, 9, 0, 0, 548, 549, 7, 3, 0, 0, 549, 550, 7, 2, 0, 0, 550, 551, 7, 5, 0, 0, 551, 552, 7, 12, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 2, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 16, 0, 0, 556, 48, 1, 0, 0, 0, 557, 558, 4, 17, 1, 0, 558, 559, 7, 13, 0, 0, 559, 560, 7, 7, 0, 0, 560, 561, 7, 7, 0, 0, 561, 562, 7, 18, 0, 0, 562, 563, 7, 20, 0, 0, 563, 564, 7, 8, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 17, 8, 0, 566, 50, 1, 0, 0, 0, 567, 568, 4, 18, 2, 0, 568, 569, 7, 16, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 7, 5, 0, 0, 571, 572, 7, 6, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 4, 0, 0, 574, 575, 7, 2, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 6, 18, 9, 0, 577, 52, 1, 0, 0, 0, 578, 580, 8, 21, 0, 0, 579, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 584, 6, 19, 0, 0, 584, 54, 1, 0, 0, 0, 585, 586, 5, 47, 0, 0, 586, 587, 5, 47, 0, 0, 587, 591, 1, 0, 0, 0, 588, 590, 8, 22, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 596, 5, 13, 0, 0, 595, 594, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 598, 1, 0, 0, 0, 597, 599, 5, 10, 0, 0, 598, 597, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 6, 20, 10, 0, 601, 56, 1, 0, 0, 0, 602, 603, 5, 47, 0, 0, 603, 604, 5, 42, 0, 0, 604, 609, 1, 0, 0, 0, 605, 608, 3, 57, 21, 0, 606, 608, 9, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 606, 1, 0, 0, 0, 608, 611, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 609, 607, 1, 0, 0, 0, 610, 612, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 612, 613, 5, 42, 0, 0, 613, 614, 5, 47, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 21, 10, 0, 
616, 58, 1, 0, 0, 0, 617, 619, 7, 23, 0, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 6, 22, 10, 0, 623, 60, 1, 0, 0, 0, 624, 625, 5, 58, 0, 0, 625, 62, 1, 0, 0, 0, 626, 627, 5, 124, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 6, 24, 11, 0, 629, 64, 1, 0, 0, 0, 630, 631, 7, 24, 0, 0, 631, 66, 1, 0, 0, 0, 632, 633, 7, 25, 0, 0, 633, 68, 1, 0, 0, 0, 634, 635, 5, 92, 0, 0, 635, 636, 7, 26, 0, 0, 636, 70, 1, 0, 0, 0, 637, 638, 8, 27, 0, 0, 638, 72, 1, 0, 0, 0, 639, 641, 7, 3, 0, 0, 640, 642, 7, 28, 0, 0, 641, 640, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 644, 1, 0, 0, 0, 643, 645, 3, 65, 25, 0, 644, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 644, 1, 0, 0, 0, 646, 647, 1, 0, 0, 0, 647, 74, 1, 0, 0, 0, 648, 649, 5, 64, 0, 0, 649, 76, 1, 0, 0, 0, 650, 651, 5, 96, 0, 0, 651, 78, 1, 0, 0, 0, 652, 656, 8, 29, 0, 0, 653, 654, 5, 96, 0, 0, 654, 656, 5, 96, 0, 0, 655, 652, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 656, 80, 1, 0, 0, 0, 657, 658, 5, 95, 0, 0, 658, 82, 1, 0, 0, 0, 659, 663, 3, 67, 26, 0, 660, 663, 3, 65, 25, 0, 661, 663, 3, 81, 33, 0, 662, 659, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 84, 1, 0, 0, 0, 664, 669, 5, 34, 0, 0, 665, 668, 3, 69, 27, 0, 666, 668, 3, 71, 28, 0, 667, 665, 1, 0, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 694, 5, 34, 0, 0, 673, 674, 5, 34, 0, 0, 674, 675, 5, 34, 0, 0, 675, 676, 5, 34, 0, 0, 676, 680, 1, 0, 0, 0, 677, 679, 8, 22, 0, 0, 678, 677, 1, 0, 0, 0, 679, 682, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 683, 1, 0, 0, 0, 682, 680, 1, 0, 0, 0, 683, 684, 5, 34, 0, 0, 684, 685, 5, 34, 0, 0, 685, 686, 5, 34, 0, 0, 686, 688, 1, 0, 0, 0, 687, 689, 5, 34, 0, 0, 688, 687, 1, 0, 0, 0, 688, 689, 1, 0, 0, 0, 689, 691, 1, 0, 0, 0, 690, 692, 5, 34, 0, 0, 691, 690, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 694, 1, 0, 0, 0, 693, 664, 1, 0, 0, 0, 693, 673, 1, 0, 0, 0, 694, 86, 1, 0, 0, 0, 695, 697, 3, 65, 25, 0, 696, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 88, 1, 0, 0, 0, 700, 702, 3, 65, 25, 0, 701, 700, 1, 0, 0, 0, 702, 703, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 709, 3, 105, 45, 0, 706, 708, 3, 65, 25, 0, 707, 706, 1, 0, 0, 0, 708, 711, 1, 0, 0, 0, 709, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 743, 1, 0, 0, 0, 711, 709, 1, 0, 0, 0, 712, 714, 3, 105, 45, 0, 713, 715, 3, 65, 25, 0, 714, 713, 1, 0, 0, 0, 715, 716, 1, 0, 0, 0, 716, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 743, 1, 0, 0, 0, 718, 720, 3, 65, 25, 0, 719, 718, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 730, 1, 0, 0, 0, 723, 727, 3, 105, 45, 0, 724, 726, 3, 65, 25, 0, 725, 724, 1, 0, 0, 0, 726, 729, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 731, 1, 0, 0, 0, 729, 727, 1, 0, 0, 0, 730, 723, 1, 0, 0, 0, 730, 731, 1, 0, 0, 0, 731, 732, 1, 0, 0, 0, 732, 733, 3, 73, 29, 0, 733, 743, 1, 0, 0, 0, 734, 736, 3, 105, 45, 0, 735, 737, 3, 65, 25, 0, 736, 735, 1, 0, 0, 0, 737, 738, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 739, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 741, 3, 73, 29, 0, 741, 743, 1, 0, 0, 0, 742, 701, 1, 0, 0, 0, 742, 712, 1, 0, 0, 0, 742, 719, 1, 0, 0, 0, 742, 734, 1, 0, 0, 0, 743, 90, 1, 0, 0, 0, 744, 745, 7, 30, 0, 0, 745, 746, 7, 31, 0, 0, 746, 92, 1, 0, 0, 0, 747, 748, 7, 12, 0, 0, 748, 749, 7, 9, 0, 0, 749, 750, 7, 0, 0, 0, 750, 94, 1, 0, 0, 0, 751, 752, 7, 12, 0, 
0, 752, 753, 7, 2, 0, 0, 753, 754, 7, 4, 0, 0, 754, 96, 1, 0, 0, 0, 755, 756, 5, 61, 0, 0, 756, 98, 1, 0, 0, 0, 757, 758, 5, 58, 0, 0, 758, 759, 5, 58, 0, 0, 759, 100, 1, 0, 0, 0, 760, 761, 5, 44, 0, 0, 761, 102, 1, 0, 0, 0, 762, 763, 7, 0, 0, 0, 763, 764, 7, 3, 0, 0, 764, 765, 7, 2, 0, 0, 765, 766, 7, 4, 0, 0, 766, 104, 1, 0, 0, 0, 767, 768, 5, 46, 0, 0, 768, 106, 1, 0, 0, 0, 769, 770, 7, 15, 0, 0, 770, 771, 7, 12, 0, 0, 771, 772, 7, 13, 0, 0, 772, 773, 7, 2, 0, 0, 773, 774, 7, 3, 0, 0, 774, 108, 1, 0, 0, 0, 775, 776, 7, 15, 0, 0, 776, 777, 7, 1, 0, 0, 777, 778, 7, 6, 0, 0, 778, 779, 7, 2, 0, 0, 779, 780, 7, 5, 0, 0, 780, 110, 1, 0, 0, 0, 781, 782, 7, 1, 0, 0, 782, 783, 7, 9, 0, 0, 783, 112, 1, 0, 0, 0, 784, 785, 7, 1, 0, 0, 785, 786, 7, 2, 0, 0, 786, 114, 1, 0, 0, 0, 787, 788, 7, 13, 0, 0, 788, 789, 7, 12, 0, 0, 789, 790, 7, 2, 0, 0, 790, 791, 7, 5, 0, 0, 791, 116, 1, 0, 0, 0, 792, 793, 7, 13, 0, 0, 793, 794, 7, 1, 0, 0, 794, 795, 7, 18, 0, 0, 795, 796, 7, 3, 0, 0, 796, 118, 1, 0, 0, 0, 797, 798, 5, 40, 0, 0, 798, 120, 1, 0, 0, 0, 799, 800, 7, 9, 0, 0, 800, 801, 7, 7, 0, 0, 801, 802, 7, 5, 0, 0, 802, 122, 1, 0, 0, 0, 803, 804, 7, 9, 0, 0, 804, 805, 7, 20, 0, 0, 805, 806, 7, 13, 0, 0, 806, 807, 7, 13, 0, 0, 807, 124, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 20, 0, 0, 810, 811, 7, 13, 0, 0, 811, 812, 7, 13, 0, 0, 812, 813, 7, 2, 0, 0, 813, 126, 1, 0, 0, 0, 814, 815, 7, 7, 0, 0, 815, 816, 7, 6, 0, 0, 816, 128, 1, 0, 0, 0, 817, 818, 5, 63, 0, 0, 818, 130, 1, 0, 0, 0, 819, 820, 7, 6, 0, 0, 820, 821, 7, 13, 0, 0, 821, 822, 7, 1, 0, 0, 822, 823, 7, 18, 0, 0, 823, 824, 7, 3, 0, 0, 824, 132, 1, 0, 0, 0, 825, 826, 5, 41, 0, 0, 826, 134, 1, 0, 0, 0, 827, 828, 7, 5, 0, 0, 828, 829, 7, 6, 0, 0, 829, 830, 7, 20, 0, 0, 830, 831, 7, 3, 0, 0, 831, 136, 1, 0, 0, 0, 832, 833, 5, 61, 0, 0, 833, 834, 5, 61, 0, 0, 834, 138, 1, 0, 0, 0, 835, 836, 5, 61, 0, 0, 836, 837, 5, 126, 0, 0, 837, 140, 1, 0, 0, 0, 838, 839, 5, 33, 0, 0, 839, 840, 5, 61, 0, 0, 840, 142, 1, 0, 0, 0, 841, 842, 5, 60, 0, 0, 842, 144, 1, 0, 0, 0, 843, 844, 5, 60, 0, 0, 844, 845, 5, 61, 0, 0, 845, 146, 1, 0, 0, 0, 846, 847, 5, 62, 0, 0, 847, 148, 1, 0, 0, 0, 848, 849, 5, 62, 0, 0, 849, 850, 5, 61, 0, 0, 850, 150, 1, 0, 0, 0, 851, 852, 5, 43, 0, 0, 852, 152, 1, 0, 0, 0, 853, 854, 5, 45, 0, 0, 854, 154, 1, 0, 0, 0, 855, 856, 5, 42, 0, 0, 856, 156, 1, 0, 0, 0, 857, 858, 5, 47, 0, 0, 858, 158, 1, 0, 0, 0, 859, 860, 5, 37, 0, 0, 860, 160, 1, 0, 0, 0, 861, 862, 4, 73, 3, 0, 862, 863, 3, 61, 23, 0, 863, 864, 1, 0, 0, 0, 864, 865, 6, 73, 12, 0, 865, 162, 1, 0, 0, 0, 866, 867, 3, 45, 15, 0, 867, 868, 1, 0, 0, 0, 868, 869, 6, 74, 13, 0, 869, 164, 1, 0, 0, 0, 870, 873, 3, 129, 57, 0, 871, 874, 3, 67, 26, 0, 872, 874, 3, 81, 33, 0, 873, 871, 1, 0, 0, 0, 873, 872, 1, 0, 0, 0, 874, 878, 1, 0, 0, 0, 875, 877, 3, 83, 34, 0, 876, 875, 1, 0, 0, 0, 877, 880, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 878, 879, 1, 0, 0, 0, 879, 888, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 883, 3, 129, 57, 0, 882, 884, 3, 65, 25, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 870, 1, 0, 0, 0, 887, 881, 1, 0, 0, 0, 888, 166, 1, 0, 0, 0, 889, 890, 5, 91, 0, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 76, 0, 0, 892, 893, 6, 76, 0, 0, 893, 168, 1, 0, 0, 0, 894, 895, 5, 93, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 77, 11, 0, 897, 898, 6, 77, 11, 0, 898, 170, 1, 0, 0, 0, 899, 903, 3, 67, 26, 0, 900, 902, 3, 83, 34, 0, 901, 900, 1, 0, 0, 0, 902, 905, 1, 0, 0, 0, 903, 901, 1, 0, 0, 0, 903, 904, 1, 0, 0, 0, 904, 916, 1, 0, 0, 0, 
905, 903, 1, 0, 0, 0, 906, 909, 3, 81, 33, 0, 907, 909, 3, 75, 30, 0, 908, 906, 1, 0, 0, 0, 908, 907, 1, 0, 0, 0, 909, 911, 1, 0, 0, 0, 910, 912, 3, 83, 34, 0, 911, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 911, 1, 0, 0, 0, 913, 914, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 899, 1, 0, 0, 0, 915, 908, 1, 0, 0, 0, 916, 172, 1, 0, 0, 0, 917, 919, 3, 77, 31, 0, 918, 920, 3, 79, 32, 0, 919, 918, 1, 0, 0, 0, 920, 921, 1, 0, 0, 0, 921, 919, 1, 0, 0, 0, 921, 922, 1, 0, 0, 0, 922, 923, 1, 0, 0, 0, 923, 924, 3, 77, 31, 0, 924, 174, 1, 0, 0, 0, 925, 926, 3, 173, 79, 0, 926, 176, 1, 0, 0, 0, 927, 928, 3, 55, 20, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 81, 10, 0, 930, 178, 1, 0, 0, 0, 931, 932, 3, 57, 21, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 82, 10, 0, 934, 180, 1, 0, 0, 0, 935, 936, 3, 59, 22, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 83, 10, 0, 938, 182, 1, 0, 0, 0, 939, 940, 3, 167, 76, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 84, 14, 0, 942, 943, 6, 84, 15, 0, 943, 184, 1, 0, 0, 0, 944, 945, 3, 63, 24, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 85, 16, 0, 947, 948, 6, 85, 11, 0, 948, 186, 1, 0, 0, 0, 949, 950, 3, 59, 22, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 86, 10, 0, 952, 188, 1, 0, 0, 0, 953, 954, 3, 55, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 87, 10, 0, 956, 190, 1, 0, 0, 0, 957, 958, 3, 57, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 88, 10, 0, 960, 192, 1, 0, 0, 0, 961, 962, 3, 63, 24, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 89, 16, 0, 964, 965, 6, 89, 11, 0, 965, 194, 1, 0, 0, 0, 966, 967, 3, 167, 76, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 90, 14, 0, 969, 196, 1, 0, 0, 0, 970, 971, 3, 169, 77, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 91, 17, 0, 973, 198, 1, 0, 0, 0, 974, 975, 3, 61, 23, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 92, 12, 0, 977, 200, 1, 0, 0, 0, 978, 979, 3, 101, 43, 0, 979, 980, 1, 0, 0, 0, 980, 981, 6, 93, 18, 0, 981, 202, 1, 0, 0, 0, 982, 983, 3, 97, 41, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 94, 19, 0, 985, 204, 1, 0, 0, 0, 986, 987, 7, 16, 0, 0, 987, 988, 7, 3, 0, 0, 988, 989, 7, 5, 0, 0, 989, 990, 7, 12, 0, 0, 990, 991, 7, 0, 0, 0, 991, 992, 7, 12, 0, 0, 992, 993, 7, 5, 0, 0, 993, 994, 7, 12, 0, 0, 994, 206, 1, 0, 0, 0, 995, 999, 8, 32, 0, 0, 996, 997, 5, 47, 0, 0, 997, 999, 8, 33, 0, 0, 998, 995, 1, 0, 0, 0, 998, 996, 1, 0, 0, 0, 999, 208, 1, 0, 0, 0, 1000, 1002, 3, 207, 96, 0, 1001, 1000, 1, 0, 0, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 210, 1, 0, 0, 0, 1005, 1006, 3, 209, 97, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 98, 20, 0, 1008, 212, 1, 0, 0, 0, 1009, 1010, 3, 85, 35, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 99, 21, 0, 1012, 214, 1, 0, 0, 0, 1013, 1014, 3, 55, 20, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 100, 10, 0, 1016, 216, 1, 0, 0, 0, 1017, 1018, 3, 57, 21, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 101, 10, 0, 1020, 218, 1, 0, 0, 0, 1021, 1022, 3, 59, 22, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 102, 10, 0, 1024, 220, 1, 0, 0, 0, 1025, 1026, 3, 63, 24, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 103, 16, 0, 1028, 1029, 6, 103, 11, 0, 1029, 222, 1, 0, 0, 0, 1030, 1031, 3, 105, 45, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 104, 22, 0, 1033, 224, 1, 0, 0, 0, 1034, 1035, 3, 101, 43, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 105, 18, 0, 1037, 226, 1, 0, 0, 0, 1038, 1039, 4, 106, 4, 0, 1039, 1040, 3, 129, 57, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 106, 23, 0, 1042, 228, 1, 0, 0, 0, 1043, 1044, 4, 107, 5, 0, 1044, 1045, 3, 165, 75, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 107, 24, 0, 1047, 230, 1, 0, 0, 0, 1048, 1053, 3, 67, 26, 0, 1049, 1053, 
3, 65, 25, 0, 1050, 1053, 3, 81, 33, 0, 1051, 1053, 3, 155, 70, 0, 1052, 1048, 1, 0, 0, 0, 1052, 1049, 1, 0, 0, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 232, 1, 0, 0, 0, 1054, 1057, 3, 67, 26, 0, 1055, 1057, 3, 155, 70, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1055, 1, 0, 0, 0, 1057, 1061, 1, 0, 0, 0, 1058, 1060, 3, 231, 108, 0, 1059, 1058, 1, 0, 0, 0, 1060, 1063, 1, 0, 0, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1074, 1, 0, 0, 0, 1063, 1061, 1, 0, 0, 0, 1064, 1067, 3, 81, 33, 0, 1065, 1067, 3, 75, 30, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1065, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1070, 3, 231, 108, 0, 1069, 1068, 1, 0, 0, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1074, 1, 0, 0, 0, 1073, 1056, 1, 0, 0, 0, 1073, 1066, 1, 0, 0, 0, 1074, 234, 1, 0, 0, 0, 1075, 1078, 3, 233, 109, 0, 1076, 1078, 3, 173, 79, 0, 1077, 1075, 1, 0, 0, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 236, 1, 0, 0, 0, 1081, 1082, 3, 55, 20, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 111, 10, 0, 1084, 238, 1, 0, 0, 0, 1085, 1086, 3, 57, 21, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 112, 10, 0, 1088, 240, 1, 0, 0, 0, 1089, 1090, 3, 59, 22, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 113, 10, 0, 1092, 242, 1, 0, 0, 0, 1093, 1094, 3, 63, 24, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 114, 16, 0, 1096, 1097, 6, 114, 11, 0, 1097, 244, 1, 0, 0, 0, 1098, 1099, 3, 97, 41, 0, 1099, 1100, 1, 0, 0, 0, 1100, 1101, 6, 115, 19, 0, 1101, 246, 1, 0, 0, 0, 1102, 1103, 3, 101, 43, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 116, 18, 0, 1105, 248, 1, 0, 0, 0, 1106, 1107, 3, 105, 45, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 117, 22, 0, 1109, 250, 1, 0, 0, 0, 1110, 1111, 4, 118, 6, 0, 1111, 1112, 3, 129, 57, 0, 1112, 1113, 1, 0, 0, 0, 1113, 1114, 6, 118, 23, 0, 1114, 252, 1, 0, 0, 0, 1115, 1116, 4, 119, 7, 0, 1116, 1117, 3, 165, 75, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 119, 24, 0, 1119, 254, 1, 0, 0, 0, 1120, 1121, 7, 12, 0, 0, 1121, 1122, 7, 2, 0, 0, 1122, 256, 1, 0, 0, 0, 1123, 1124, 3, 235, 110, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 121, 25, 0, 1126, 258, 1, 0, 0, 0, 1127, 1128, 3, 55, 20, 0, 1128, 1129, 1, 0, 0, 0, 1129, 1130, 6, 122, 10, 0, 1130, 260, 1, 0, 0, 0, 1131, 1132, 3, 57, 21, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 6, 123, 10, 0, 1134, 262, 1, 0, 0, 0, 1135, 1136, 3, 59, 22, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 124, 10, 0, 1138, 264, 1, 0, 0, 0, 1139, 1140, 3, 63, 24, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1142, 6, 125, 16, 0, 1142, 1143, 6, 125, 11, 0, 1143, 266, 1, 0, 0, 0, 1144, 1145, 3, 167, 76, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 126, 14, 0, 1147, 1148, 6, 126, 26, 0, 1148, 268, 1, 0, 0, 0, 1149, 1150, 7, 7, 0, 0, 1150, 1151, 7, 9, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 127, 27, 0, 1153, 270, 1, 0, 0, 0, 1154, 1155, 7, 19, 0, 0, 1155, 1156, 7, 1, 0, 0, 1156, 1157, 7, 5, 0, 0, 1157, 1158, 7, 10, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 128, 27, 0, 1160, 272, 1, 0, 0, 0, 1161, 1162, 8, 34, 0, 0, 1162, 274, 1, 0, 0, 0, 1163, 1165, 3, 273, 129, 0, 1164, 1163, 1, 0, 0, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1164, 1, 0, 0, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 3, 61, 23, 0, 1169, 1171, 1, 0, 0, 0, 1170, 1164, 1, 0, 0, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1173, 1, 0, 0, 0, 1172, 1174, 3, 273, 129, 0, 1173, 1172, 1, 0, 0, 0, 1174, 1175, 1, 0, 0, 0, 1175, 1173, 1, 0, 0, 0, 1175, 1176, 1, 0, 0, 0, 1176, 276, 1, 0, 0, 0, 1177, 1178, 3, 275, 130, 0, 1178, 1179, 1, 0, 0, 0, 1179, 
1180, 6, 131, 28, 0, 1180, 278, 1, 0, 0, 0, 1181, 1182, 3, 55, 20, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 132, 10, 0, 1184, 280, 1, 0, 0, 0, 1185, 1186, 3, 57, 21, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 133, 10, 0, 1188, 282, 1, 0, 0, 0, 1189, 1190, 3, 59, 22, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 134, 10, 0, 1192, 284, 1, 0, 0, 0, 1193, 1194, 3, 63, 24, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 135, 16, 0, 1196, 1197, 6, 135, 11, 0, 1197, 1198, 6, 135, 11, 0, 1198, 286, 1, 0, 0, 0, 1199, 1200, 3, 97, 41, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 136, 19, 0, 1202, 288, 1, 0, 0, 0, 1203, 1204, 3, 101, 43, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 137, 18, 0, 1206, 290, 1, 0, 0, 0, 1207, 1208, 3, 105, 45, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 138, 22, 0, 1210, 292, 1, 0, 0, 0, 1211, 1212, 3, 271, 128, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 139, 29, 0, 1214, 294, 1, 0, 0, 0, 1215, 1216, 3, 235, 110, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 140, 25, 0, 1218, 296, 1, 0, 0, 0, 1219, 1220, 3, 175, 80, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 141, 30, 0, 1222, 298, 1, 0, 0, 0, 1223, 1224, 4, 142, 8, 0, 1224, 1225, 3, 129, 57, 0, 1225, 1226, 1, 0, 0, 0, 1226, 1227, 6, 142, 23, 0, 1227, 300, 1, 0, 0, 0, 1228, 1229, 4, 143, 9, 0, 1229, 1230, 3, 165, 75, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 143, 24, 0, 1232, 302, 1, 0, 0, 0, 1233, 1234, 3, 55, 20, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 144, 10, 0, 1236, 304, 1, 0, 0, 0, 1237, 1238, 3, 57, 21, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 145, 10, 0, 1240, 306, 1, 0, 0, 0, 1241, 1242, 3, 59, 22, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 6, 146, 10, 0, 1244, 308, 1, 0, 0, 0, 1245, 1246, 3, 63, 24, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 16, 0, 1248, 1249, 6, 147, 11, 0, 1249, 310, 1, 0, 0, 0, 1250, 1251, 3, 105, 45, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 148, 22, 0, 1253, 312, 1, 0, 0, 0, 1254, 1255, 4, 149, 10, 0, 1255, 1256, 3, 129, 57, 0, 1256, 1257, 1, 0, 0, 0, 1257, 1258, 6, 149, 23, 0, 1258, 314, 1, 0, 0, 0, 1259, 1260, 4, 150, 11, 0, 1260, 1261, 3, 165, 75, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 150, 24, 0, 1263, 316, 1, 0, 0, 0, 1264, 1265, 3, 175, 80, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 151, 30, 0, 1267, 318, 1, 0, 0, 0, 1268, 1269, 3, 171, 78, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 152, 31, 0, 1271, 320, 1, 0, 0, 0, 1272, 1273, 3, 55, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 153, 10, 0, 1275, 322, 1, 0, 0, 0, 1276, 1277, 3, 57, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 154, 10, 0, 1279, 324, 1, 0, 0, 0, 1280, 1281, 3, 59, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 155, 10, 0, 1283, 326, 1, 0, 0, 0, 1284, 1285, 3, 63, 24, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 156, 16, 0, 1287, 1288, 6, 156, 11, 0, 1288, 328, 1, 0, 0, 0, 1289, 1290, 7, 1, 0, 0, 1290, 1291, 7, 9, 0, 0, 1291, 1292, 7, 15, 0, 0, 1292, 1293, 7, 7, 0, 0, 1293, 330, 1, 0, 0, 0, 1294, 1295, 3, 55, 20, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 158, 10, 0, 1297, 332, 1, 0, 0, 0, 1298, 1299, 3, 57, 21, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 159, 10, 0, 1301, 334, 1, 0, 0, 0, 1302, 1303, 3, 59, 22, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 160, 10, 0, 1305, 336, 1, 0, 0, 0, 1306, 1307, 3, 169, 77, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 161, 17, 0, 1309, 1310, 6, 161, 11, 0, 1310, 338, 1, 0, 0, 0, 1311, 1312, 3, 61, 23, 0, 1312, 1313, 1, 0, 0, 0, 1313, 1314, 6, 162, 12, 0, 1314, 340, 1, 0, 0, 0, 1315, 1321, 3, 75, 30, 0, 1316, 1321, 3, 65, 25, 0, 1317, 1321, 3, 105, 45, 0, 1318, 1321, 3, 67, 26, 0, 1319, 1321, 3, 81, 33, 0, 
1320, 1315, 1, 0, 0, 0, 1320, 1316, 1, 0, 0, 0, 1320, 1317, 1, 0, 0, 0, 1320, 1318, 1, 0, 0, 0, 1320, 1319, 1, 0, 0, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1320, 1, 0, 0, 0, 1322, 1323, 1, 0, 0, 0, 1323, 342, 1, 0, 0, 0, 1324, 1325, 3, 55, 20, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 164, 10, 0, 1327, 344, 1, 0, 0, 0, 1328, 1329, 3, 57, 21, 0, 1329, 1330, 1, 0, 0, 0, 1330, 1331, 6, 165, 10, 0, 1331, 346, 1, 0, 0, 0, 1332, 1333, 3, 59, 22, 0, 1333, 1334, 1, 0, 0, 0, 1334, 1335, 6, 166, 10, 0, 1335, 348, 1, 0, 0, 0, 1336, 1337, 3, 63, 24, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 167, 16, 0, 1339, 1340, 6, 167, 11, 0, 1340, 350, 1, 0, 0, 0, 1341, 1342, 3, 61, 23, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 168, 12, 0, 1344, 352, 1, 0, 0, 0, 1345, 1346, 3, 101, 43, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 169, 18, 0, 1348, 354, 1, 0, 0, 0, 1349, 1350, 3, 105, 45, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 170, 22, 0, 1352, 356, 1, 0, 0, 0, 1353, 1354, 3, 269, 127, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 171, 32, 0, 1356, 1357, 6, 171, 33, 0, 1357, 358, 1, 0, 0, 0, 1358, 1359, 3, 209, 97, 0, 1359, 1360, 1, 0, 0, 0, 1360, 1361, 6, 172, 20, 0, 1361, 360, 1, 0, 0, 0, 1362, 1363, 3, 85, 35, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 173, 21, 0, 1365, 362, 1, 0, 0, 0, 1366, 1367, 3, 55, 20, 0, 1367, 1368, 1, 0, 0, 0, 1368, 1369, 6, 174, 10, 0, 1369, 364, 1, 0, 0, 0, 1370, 1371, 3, 57, 21, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 175, 10, 0, 1373, 366, 1, 0, 0, 0, 1374, 1375, 3, 59, 22, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 176, 10, 0, 1377, 368, 1, 0, 0, 0, 1378, 1379, 3, 63, 24, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 177, 16, 0, 1381, 1382, 6, 177, 11, 0, 1382, 1383, 6, 177, 11, 0, 1383, 370, 1, 0, 0, 0, 1384, 1385, 3, 101, 43, 0, 1385, 1386, 1, 0, 0, 0, 1386, 1387, 6, 178, 18, 0, 1387, 372, 1, 0, 0, 0, 1388, 1389, 3, 105, 45, 0, 1389, 1390, 1, 0, 0, 0, 1390, 1391, 6, 179, 22, 0, 1391, 374, 1, 0, 0, 0, 1392, 1393, 3, 235, 110, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 180, 25, 0, 1395, 376, 1, 0, 0, 0, 1396, 1397, 3, 55, 20, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 181, 10, 0, 1399, 378, 1, 0, 0, 0, 1400, 1401, 3, 57, 21, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 182, 10, 0, 1403, 380, 1, 0, 0, 0, 1404, 1405, 3, 59, 22, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 183, 10, 0, 1407, 382, 1, 0, 0, 0, 1408, 1409, 3, 63, 24, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 184, 16, 0, 1411, 1412, 6, 184, 11, 0, 1412, 384, 1, 0, 0, 0, 1413, 1414, 3, 209, 97, 0, 1414, 1415, 1, 0, 0, 0, 1415, 1416, 6, 185, 20, 0, 1416, 1417, 6, 185, 11, 0, 1417, 1418, 6, 185, 34, 0, 1418, 386, 1, 0, 0, 0, 1419, 1420, 3, 85, 35, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 186, 21, 0, 1422, 1423, 6, 186, 11, 0, 1423, 1424, 6, 186, 34, 0, 1424, 388, 1, 0, 0, 0, 1425, 1426, 3, 55, 20, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 187, 10, 0, 1428, 390, 1, 0, 0, 0, 1429, 1430, 3, 57, 21, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 188, 10, 0, 1432, 392, 1, 0, 0, 0, 1433, 1434, 3, 59, 22, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 189, 10, 0, 1436, 394, 1, 0, 0, 0, 1437, 1438, 3, 61, 23, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 190, 12, 0, 1440, 1441, 6, 190, 11, 0, 1441, 1442, 6, 190, 9, 0, 1442, 396, 1, 0, 0, 0, 1443, 1444, 3, 101, 43, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 191, 18, 0, 1446, 1447, 6, 191, 11, 0, 1447, 1448, 6, 191, 9, 0, 1448, 398, 1, 0, 0, 0, 1449, 1450, 3, 55, 20, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 192, 10, 0, 1452, 400, 1, 0, 0, 0, 1453, 1454, 3, 57, 21, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 193, 10, 0, 
1456, 402, 1, 0, 0, 0, 1457, 1458, 3, 59, 22, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 194, 10, 0, 1460, 404, 1, 0, 0, 0, 1461, 1462, 3, 175, 80, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 195, 11, 0, 1464, 1465, 6, 195, 0, 0, 1465, 1466, 6, 195, 30, 0, 1466, 406, 1, 0, 0, 0, 1467, 1468, 3, 171, 78, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 196, 11, 0, 1470, 1471, 6, 196, 0, 0, 1471, 1472, 6, 196, 31, 0, 1472, 408, 1, 0, 0, 0, 1473, 1474, 3, 91, 38, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 197, 11, 0, 1476, 1477, 6, 197, 0, 0, 1477, 1478, 6, 197, 35, 0, 1478, 410, 1, 0, 0, 0, 1479, 1480, 3, 63, 24, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 198, 16, 0, 1482, 1483, 6, 198, 11, 0, 1483, 412, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 581, 591, 595, 598, 607, 609, 620, 641, 646, 655, 662, 667, 669, 680, 688, 691, 693, 698, 703, 709, 716, 721, 727, 730, 738, 742, 873, 878, 885, 887, 903, 908, 913, 915, 921, 998, 1003, 1052, 1056, 1061, 1066, 1071, 1073, 1077, 1079, 1166, 1170, 1175, 1320, 1322, 36, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 24, 0, 7, 16, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 48, 0, 7, 64, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file +[4, 0, 128, 1601, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 
139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 654, 8, 24, 11, 24, 12, 24, 655, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 664, 8, 25, 10, 25, 12, 25, 667, 9, 25, 1, 25, 3, 25, 670, 8, 25, 1, 25, 3, 25, 673, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 682, 8, 26, 10, 26, 12, 26, 685, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 693, 8, 27, 11, 27, 12, 27, 694, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 714, 8, 33, 1, 33, 4, 33, 717, 8, 33, 11, 33, 12, 33, 718, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 728, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 735, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 740, 8, 39, 10, 39, 12, 39, 743, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 751, 8, 39, 10, 39, 12, 39, 754, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 761, 8, 39, 1, 39, 3, 39, 764, 8, 39, 3, 39, 766, 8, 39, 1, 40, 4, 40, 769, 8, 40, 11, 40, 12, 40, 770, 1, 41, 4, 41, 774, 
8, 41, 11, 41, 12, 41, 775, 1, 41, 1, 41, 5, 41, 780, 8, 41, 10, 41, 12, 41, 783, 9, 41, 1, 41, 1, 41, 4, 41, 787, 8, 41, 11, 41, 12, 41, 788, 1, 41, 4, 41, 792, 8, 41, 11, 41, 12, 41, 793, 1, 41, 1, 41, 5, 41, 798, 8, 41, 10, 41, 12, 41, 801, 9, 41, 3, 41, 803, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 809, 8, 41, 11, 41, 12, 41, 810, 1, 41, 1, 41, 3, 41, 815, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 3, 79, 943, 8, 79, 1, 79, 5, 79, 946, 8, 79, 10, 79, 12, 79, 949, 9, 79, 1, 79, 1, 79, 4, 79, 953, 8, 79, 11, 79, 12, 79, 954, 3, 79, 957, 8, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 5, 82, 971, 8, 82, 10, 82, 12, 82, 974, 9, 82, 1, 82, 1, 82, 3, 82, 978, 8, 82, 1, 82, 4, 82, 981, 8, 82, 11, 82, 12, 82, 982, 3, 82, 985, 8, 82, 1, 83, 1, 83, 4, 83, 989, 8, 83, 11, 83, 12, 83, 990, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 3, 100, 1068, 8, 100, 1, 101, 4, 101, 1071, 8, 101, 11, 101, 12, 101, 1072, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 1122, 8, 112, 1, 113, 1, 113, 3, 113, 1126, 8, 113, 1, 113, 5, 113, 1129, 8, 113, 10, 113, 12, 113, 1132, 9, 113, 1, 113, 1, 113, 3, 113, 1136, 8, 113, 1, 113, 4, 113, 1139, 8, 113, 11, 113, 12, 113, 1140, 3, 113, 1143, 8, 113, 1, 114, 1, 114, 4, 114, 1147, 8, 114, 11, 114, 12, 114, 1148, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 
1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 4, 134, 1234, 8, 134, 11, 134, 12, 134, 1235, 1, 134, 1, 134, 3, 134, 1240, 8, 134, 1, 134, 4, 134, 1243, 8, 134, 11, 134, 12, 134, 1244, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 4, 167, 1390, 8, 167, 11, 167, 12, 167, 1391, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 2, 683, 752, 0, 213, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 
134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 69, 176, 70, 178, 71, 180, 72, 182, 0, 184, 73, 186, 74, 188, 75, 190, 76, 192, 0, 194, 0, 196, 77, 198, 78, 200, 79, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 80, 216, 0, 218, 81, 220, 0, 222, 0, 224, 82, 226, 83, 228, 84, 230, 0, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 88, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 89, 266, 0, 268, 90, 270, 91, 272, 92, 274, 0, 276, 0, 278, 93, 280, 94, 282, 0, 284, 95, 286, 0, 288, 96, 290, 97, 292, 98, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 99, 314, 100, 316, 101, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 102, 332, 103, 334, 104, 336, 0, 338, 105, 340, 106, 342, 107, 344, 108, 346, 0, 348, 0, 350, 109, 352, 110, 354, 111, 356, 112, 358, 0, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 113, 374, 114, 376, 115, 378, 0, 380, 0, 382, 0, 384, 0, 386, 116, 388, 117, 390, 118, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 0, 404, 0, 406, 120, 408, 121, 410, 122, 412, 0, 414, 0, 416, 0, 418, 123, 420, 124, 422, 125, 424, 0, 426, 0, 428, 126, 430, 127, 432, 128, 434, 0, 436, 0, 438, 0, 440, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1628, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 
146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 244, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 8, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 9, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 10, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 11, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 12, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 13, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 14, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 15, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 16, 442, 1, 0, 0, 0, 18, 452, 1, 0, 0, 0, 20, 459, 1, 0, 0, 0, 22, 468, 1, 0, 0, 0, 24, 475, 1, 0, 0, 0, 26, 485, 1, 0, 0, 0, 28, 492, 1, 0, 0, 0, 30, 499, 1, 0, 0, 0, 32, 506, 1, 0, 0, 0, 34, 514, 1, 0, 0, 0, 36, 526, 1, 0, 0, 0, 38, 535, 1, 0, 0, 0, 40, 541, 1, 0, 0, 0, 42, 548, 1, 0, 0, 0, 44, 555, 1, 0, 0, 0, 46, 563, 1, 0, 0, 0, 48, 571, 1, 0, 0, 0, 50, 586, 1, 0, 0, 0, 52, 598, 1, 0, 0, 0, 54, 609, 1, 0, 0, 0, 56, 617, 1, 0, 0, 0, 58, 625, 1, 0, 0, 0, 60, 633, 1, 0, 0, 0, 62, 642, 1, 0, 0, 0, 64, 653, 1, 0, 0, 0, 66, 659, 1, 0, 0, 0, 68, 676, 1, 0, 0, 0, 70, 692, 1, 0, 0, 0, 72, 698, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 704, 1, 0, 0, 
0, 78, 706, 1, 0, 0, 0, 80, 709, 1, 0, 0, 0, 82, 711, 1, 0, 0, 0, 84, 720, 1, 0, 0, 0, 86, 722, 1, 0, 0, 0, 88, 727, 1, 0, 0, 0, 90, 729, 1, 0, 0, 0, 92, 734, 1, 0, 0, 0, 94, 765, 1, 0, 0, 0, 96, 768, 1, 0, 0, 0, 98, 814, 1, 0, 0, 0, 100, 816, 1, 0, 0, 0, 102, 819, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 829, 1, 0, 0, 0, 110, 832, 1, 0, 0, 0, 112, 834, 1, 0, 0, 0, 114, 836, 1, 0, 0, 0, 116, 841, 1, 0, 0, 0, 118, 843, 1, 0, 0, 0, 120, 849, 1, 0, 0, 0, 122, 855, 1, 0, 0, 0, 124, 858, 1, 0, 0, 0, 126, 861, 1, 0, 0, 0, 128, 866, 1, 0, 0, 0, 130, 871, 1, 0, 0, 0, 132, 873, 1, 0, 0, 0, 134, 877, 1, 0, 0, 0, 136, 882, 1, 0, 0, 0, 138, 888, 1, 0, 0, 0, 140, 891, 1, 0, 0, 0, 142, 893, 1, 0, 0, 0, 144, 899, 1, 0, 0, 0, 146, 901, 1, 0, 0, 0, 148, 906, 1, 0, 0, 0, 150, 909, 1, 0, 0, 0, 152, 912, 1, 0, 0, 0, 154, 915, 1, 0, 0, 0, 156, 917, 1, 0, 0, 0, 158, 920, 1, 0, 0, 0, 160, 922, 1, 0, 0, 0, 162, 925, 1, 0, 0, 0, 164, 927, 1, 0, 0, 0, 166, 929, 1, 0, 0, 0, 168, 931, 1, 0, 0, 0, 170, 933, 1, 0, 0, 0, 172, 935, 1, 0, 0, 0, 174, 956, 1, 0, 0, 0, 176, 958, 1, 0, 0, 0, 178, 963, 1, 0, 0, 0, 180, 984, 1, 0, 0, 0, 182, 986, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1000, 1, 0, 0, 0, 190, 1004, 1, 0, 0, 0, 192, 1008, 1, 0, 0, 0, 194, 1013, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1022, 1, 0, 0, 0, 200, 1026, 1, 0, 0, 0, 202, 1030, 1, 0, 0, 0, 204, 1035, 1, 0, 0, 0, 206, 1039, 1, 0, 0, 0, 208, 1043, 1, 0, 0, 0, 210, 1047, 1, 0, 0, 0, 212, 1051, 1, 0, 0, 0, 214, 1055, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1074, 1, 0, 0, 0, 222, 1078, 1, 0, 0, 0, 224, 1082, 1, 0, 0, 0, 226, 1086, 1, 0, 0, 0, 228, 1090, 1, 0, 0, 0, 230, 1094, 1, 0, 0, 0, 232, 1099, 1, 0, 0, 0, 234, 1103, 1, 0, 0, 0, 236, 1107, 1, 0, 0, 0, 238, 1112, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1142, 1, 0, 0, 0, 244, 1146, 1, 0, 0, 0, 246, 1150, 1, 0, 0, 0, 248, 1154, 1, 0, 0, 0, 250, 1158, 1, 0, 0, 0, 252, 1162, 1, 0, 0, 0, 254, 1167, 1, 0, 0, 0, 256, 1171, 1, 0, 0, 0, 258, 1175, 1, 0, 0, 0, 260, 1179, 1, 0, 0, 0, 262, 1184, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1196, 1, 0, 0, 0, 270, 1200, 1, 0, 0, 0, 272, 1204, 1, 0, 0, 0, 274, 1208, 1, 0, 0, 0, 276, 1213, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1250, 1, 0, 0, 0, 290, 1254, 1, 0, 0, 0, 292, 1258, 1, 0, 0, 0, 294, 1262, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1276, 1, 0, 0, 0, 302, 1280, 1, 0, 0, 0, 304, 1284, 1, 0, 0, 0, 306, 1288, 1, 0, 0, 0, 308, 1292, 1, 0, 0, 0, 310, 1297, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1306, 1, 0, 0, 0, 316, 1310, 1, 0, 0, 0, 318, 1314, 1, 0, 0, 0, 320, 1319, 1, 0, 0, 0, 322, 1323, 1, 0, 0, 0, 324, 1328, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1337, 1, 0, 0, 0, 330, 1341, 1, 0, 0, 0, 332, 1345, 1, 0, 0, 0, 334, 1349, 1, 0, 0, 0, 336, 1353, 1, 0, 0, 0, 338, 1358, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1367, 1, 0, 0, 0, 344, 1371, 1, 0, 0, 0, 346, 1375, 1, 0, 0, 0, 348, 1380, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1393, 1, 0, 0, 0, 354, 1397, 1, 0, 0, 0, 356, 1401, 1, 0, 0, 0, 358, 1405, 1, 0, 0, 0, 360, 1410, 1, 0, 0, 0, 362, 1414, 1, 0, 0, 0, 364, 1418, 1, 0, 0, 0, 366, 1422, 1, 0, 0, 0, 368, 1427, 1, 0, 0, 0, 370, 1431, 1, 0, 0, 0, 372, 1435, 1, 0, 0, 0, 374, 1439, 1, 0, 0, 0, 376, 1443, 1, 0, 0, 0, 378, 1447, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1461, 1, 0, 0, 0, 386, 1465, 1, 0, 0, 0, 388, 1469, 1, 0, 0, 0, 390, 1473, 1, 0, 0, 0, 392, 
1477, 1, 0, 0, 0, 394, 1482, 1, 0, 0, 0, 396, 1486, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1505, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 1530, 1, 0, 0, 0, 416, 1536, 1, 0, 0, 0, 418, 1542, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1550, 1, 0, 0, 0, 424, 1554, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1566, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1574, 1, 0, 0, 0, 434, 1578, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1590, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 443, 7, 0, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 2, 0, 0, 445, 446, 7, 2, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 4, 0, 0, 448, 449, 7, 5, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 0, 0, 0, 451, 17, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 1, 1, 0, 458, 19, 1, 0, 0, 0, 459, 460, 7, 3, 0, 0, 460, 461, 7, 9, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 4, 0, 0, 464, 465, 7, 10, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 2, 2, 0, 467, 21, 1, 0, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 11, 0, 0, 470, 471, 7, 12, 0, 0, 471, 472, 7, 13, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 3, 0, 0, 474, 23, 1, 0, 0, 0, 475, 476, 7, 3, 0, 0, 476, 477, 7, 14, 0, 0, 477, 478, 7, 8, 0, 0, 478, 479, 7, 13, 0, 0, 479, 480, 7, 12, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 4, 3, 0, 484, 25, 1, 0, 0, 0, 485, 486, 7, 15, 0, 0, 486, 487, 7, 6, 0, 0, 487, 488, 7, 7, 0, 0, 488, 489, 7, 16, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 5, 4, 0, 491, 27, 1, 0, 0, 0, 492, 493, 7, 17, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 7, 0, 0, 495, 496, 7, 18, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 6, 0, 0, 498, 29, 1, 0, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 3, 0, 0, 502, 503, 7, 8, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 7, 1, 0, 505, 31, 1, 0, 0, 0, 506, 507, 7, 13, 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 16, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 5, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 8, 0, 0, 513, 33, 1, 0, 0, 0, 514, 515, 7, 16, 0, 0, 515, 516, 7, 11, 0, 0, 516, 517, 5, 95, 0, 0, 517, 518, 7, 3, 0, 0, 518, 519, 7, 14, 0, 0, 519, 520, 7, 8, 0, 0, 520, 521, 7, 12, 0, 0, 521, 522, 7, 9, 0, 0, 522, 523, 7, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 9, 5, 0, 525, 35, 1, 0, 0, 0, 526, 527, 7, 6, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 9, 0, 0, 529, 530, 7, 12, 0, 0, 530, 531, 7, 16, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 10, 6, 0, 534, 37, 1, 0, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 7, 0, 0, 537, 538, 7, 19, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 6, 11, 0, 0, 540, 39, 1, 0, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, 7, 10, 0, 0, 543, 544, 7, 7, 0, 0, 544, 545, 7, 19, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 12, 7, 0, 547, 41, 1, 0, 0, 0, 548, 549, 7, 2, 0, 0, 549, 550, 7, 7, 0, 0, 550, 551, 7, 6, 0, 0, 551, 552, 7, 5, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 13, 0, 0, 554, 43, 1, 0, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 7, 5, 0, 0, 557, 558, 7, 12, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 14, 0, 0, 562, 45, 1, 0, 0, 0, 563, 564, 7, 19, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 15, 0, 0, 570, 47, 1, 0, 0, 0, 571, 572, 4, 16, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 9, 0, 0, 574, 575, 7, 13, 0, 0, 575, 576, 7, 1, 0, 0, 576, 577, 7, 9, 
0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 7, 2, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 12, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 16, 0, 0, 585, 49, 1, 0, 0, 0, 586, 587, 4, 17, 1, 0, 587, 588, 7, 13, 0, 0, 588, 589, 7, 7, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 18, 0, 0, 591, 592, 7, 20, 0, 0, 592, 593, 7, 8, 0, 0, 593, 594, 5, 95, 0, 0, 594, 595, 5, 128020, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 17, 8, 0, 597, 51, 1, 0, 0, 0, 598, 599, 4, 18, 2, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 3, 0, 0, 601, 602, 7, 5, 0, 0, 602, 603, 7, 6, 0, 0, 603, 604, 7, 1, 0, 0, 604, 605, 7, 4, 0, 0, 605, 606, 7, 2, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 18, 9, 0, 608, 53, 1, 0, 0, 0, 609, 610, 4, 19, 3, 0, 610, 611, 7, 21, 0, 0, 611, 612, 7, 7, 0, 0, 612, 613, 7, 1, 0, 0, 613, 614, 7, 9, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 19, 10, 0, 616, 55, 1, 0, 0, 0, 617, 618, 4, 20, 4, 0, 618, 619, 7, 15, 0, 0, 619, 620, 7, 20, 0, 0, 620, 621, 7, 13, 0, 0, 621, 622, 7, 13, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 20, 10, 0, 624, 57, 1, 0, 0, 0, 625, 626, 4, 21, 5, 0, 626, 627, 7, 13, 0, 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 5, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 21, 10, 0, 632, 59, 1, 0, 0, 0, 633, 634, 4, 22, 6, 0, 634, 635, 7, 6, 0, 0, 635, 636, 7, 1, 0, 0, 636, 637, 7, 17, 0, 0, 637, 638, 7, 10, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 22, 10, 0, 641, 61, 1, 0, 0, 0, 642, 643, 4, 23, 7, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 23, 10, 0, 651, 63, 1, 0, 0, 0, 652, 654, 8, 22, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 24, 0, 0, 658, 65, 1, 0, 0, 0, 659, 660, 5, 47, 0, 0, 660, 661, 5, 47, 0, 0, 661, 665, 1, 0, 0, 0, 662, 664, 8, 23, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 670, 5, 13, 0, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 5, 10, 0, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 25, 11, 0, 675, 67, 1, 0, 0, 0, 676, 677, 5, 47, 0, 0, 677, 678, 5, 42, 0, 0, 678, 683, 1, 0, 0, 0, 679, 682, 3, 68, 26, 0, 680, 682, 9, 0, 0, 0, 681, 679, 1, 0, 0, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 684, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 687, 5, 42, 0, 0, 687, 688, 5, 47, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 26, 11, 0, 690, 69, 1, 0, 0, 0, 691, 693, 7, 24, 0, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 697, 6, 27, 11, 0, 697, 71, 1, 0, 0, 0, 698, 699, 5, 124, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 28, 12, 0, 701, 73, 1, 0, 0, 0, 702, 703, 7, 25, 0, 0, 703, 75, 1, 0, 0, 0, 704, 705, 7, 26, 0, 0, 705, 77, 1, 0, 0, 0, 706, 707, 5, 92, 0, 0, 707, 708, 7, 27, 0, 0, 708, 79, 1, 0, 0, 0, 709, 710, 8, 28, 0, 0, 710, 81, 1, 0, 0, 0, 711, 713, 7, 3, 0, 0, 712, 714, 7, 29, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 3, 74, 29, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 83, 1, 0, 0, 0, 720, 721, 5, 64, 0, 0, 721, 85, 1, 0, 0, 0, 722, 723, 5, 96, 0, 0, 723, 87, 1, 0, 0, 0, 724, 728, 8, 30, 0, 0, 725, 726, 5, 96, 0, 0, 726, 728, 5, 
96, 0, 0, 727, 724, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 89, 1, 0, 0, 0, 729, 730, 5, 95, 0, 0, 730, 91, 1, 0, 0, 0, 731, 735, 3, 76, 30, 0, 732, 735, 3, 74, 29, 0, 733, 735, 3, 90, 37, 0, 734, 731, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 733, 1, 0, 0, 0, 735, 93, 1, 0, 0, 0, 736, 741, 5, 34, 0, 0, 737, 740, 3, 78, 31, 0, 738, 740, 3, 80, 32, 0, 739, 737, 1, 0, 0, 0, 739, 738, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 744, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 744, 766, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 748, 5, 34, 0, 0, 748, 752, 1, 0, 0, 0, 749, 751, 8, 23, 0, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 753, 755, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 760, 1, 0, 0, 0, 759, 761, 5, 34, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 764, 5, 34, 0, 0, 763, 762, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 736, 1, 0, 0, 0, 765, 745, 1, 0, 0, 0, 766, 95, 1, 0, 0, 0, 767, 769, 3, 74, 29, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 97, 1, 0, 0, 0, 772, 774, 3, 74, 29, 0, 773, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 781, 3, 116, 50, 0, 778, 780, 3, 74, 29, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 815, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 786, 3, 116, 50, 0, 785, 787, 3, 74, 29, 0, 786, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 815, 1, 0, 0, 0, 790, 792, 3, 74, 29, 0, 791, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 802, 1, 0, 0, 0, 795, 799, 3, 116, 50, 0, 796, 798, 3, 74, 29, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 3, 82, 33, 0, 805, 815, 1, 0, 0, 0, 806, 808, 3, 116, 50, 0, 807, 809, 3, 74, 29, 0, 808, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 3, 82, 33, 0, 813, 815, 1, 0, 0, 0, 814, 773, 1, 0, 0, 0, 814, 784, 1, 0, 0, 0, 814, 791, 1, 0, 0, 0, 814, 806, 1, 0, 0, 0, 815, 99, 1, 0, 0, 0, 816, 817, 7, 31, 0, 0, 817, 818, 7, 32, 0, 0, 818, 101, 1, 0, 0, 0, 819, 820, 7, 12, 0, 0, 820, 821, 7, 9, 0, 0, 821, 822, 7, 0, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 4, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 5, 61, 0, 0, 828, 107, 1, 0, 0, 0, 829, 830, 5, 58, 0, 0, 830, 831, 5, 58, 0, 0, 831, 109, 1, 0, 0, 0, 832, 833, 5, 58, 0, 0, 833, 111, 1, 0, 0, 0, 834, 835, 5, 44, 0, 0, 835, 113, 1, 0, 0, 0, 836, 837, 7, 0, 0, 0, 837, 838, 7, 3, 0, 0, 838, 839, 7, 2, 0, 0, 839, 840, 7, 4, 0, 0, 840, 115, 1, 0, 0, 0, 841, 842, 5, 46, 0, 0, 842, 117, 1, 0, 0, 0, 843, 844, 7, 15, 0, 0, 844, 845, 7, 12, 0, 0, 845, 846, 7, 13, 0, 0, 846, 847, 7, 2, 0, 0, 847, 848, 7, 3, 0, 0, 848, 119, 1, 0, 0, 0, 849, 850, 7, 15, 0, 0, 850, 851, 7, 1, 0, 0, 851, 852, 7, 6, 0, 0, 852, 853, 7, 2, 0, 0, 853, 854, 7, 5, 0, 0, 854, 121, 1, 0, 0, 0, 855, 856, 7, 1, 0, 0, 856, 857, 7, 9, 0, 0, 857, 123, 1, 0, 0, 0, 858, 859, 7, 1, 0, 0, 859, 860, 7, 2, 0, 0, 860, 125, 1, 0, 0, 0, 861, 862, 7, 13, 0, 0, 862, 863, 7, 12, 0, 0, 863, 864, 7, 2, 0, 0, 864, 865, 7, 5, 0, 0, 865, 
127, 1, 0, 0, 0, 866, 867, 7, 13, 0, 0, 867, 868, 7, 1, 0, 0, 868, 869, 7, 18, 0, 0, 869, 870, 7, 3, 0, 0, 870, 129, 1, 0, 0, 0, 871, 872, 5, 40, 0, 0, 872, 131, 1, 0, 0, 0, 873, 874, 7, 9, 0, 0, 874, 875, 7, 7, 0, 0, 875, 876, 7, 5, 0, 0, 876, 133, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 20, 0, 0, 879, 880, 7, 13, 0, 0, 880, 881, 7, 13, 0, 0, 881, 135, 1, 0, 0, 0, 882, 883, 7, 9, 0, 0, 883, 884, 7, 20, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 2, 0, 0, 887, 137, 1, 0, 0, 0, 888, 889, 7, 7, 0, 0, 889, 890, 7, 6, 0, 0, 890, 139, 1, 0, 0, 0, 891, 892, 5, 63, 0, 0, 892, 141, 1, 0, 0, 0, 893, 894, 7, 6, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 1, 0, 0, 896, 897, 7, 18, 0, 0, 897, 898, 7, 3, 0, 0, 898, 143, 1, 0, 0, 0, 899, 900, 5, 41, 0, 0, 900, 145, 1, 0, 0, 0, 901, 902, 7, 5, 0, 0, 902, 903, 7, 6, 0, 0, 903, 904, 7, 20, 0, 0, 904, 905, 7, 3, 0, 0, 905, 147, 1, 0, 0, 0, 906, 907, 5, 61, 0, 0, 907, 908, 5, 61, 0, 0, 908, 149, 1, 0, 0, 0, 909, 910, 5, 61, 0, 0, 910, 911, 5, 126, 0, 0, 911, 151, 1, 0, 0, 0, 912, 913, 5, 33, 0, 0, 913, 914, 5, 61, 0, 0, 914, 153, 1, 0, 0, 0, 915, 916, 5, 60, 0, 0, 916, 155, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 919, 5, 61, 0, 0, 919, 157, 1, 0, 0, 0, 920, 921, 5, 62, 0, 0, 921, 159, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 924, 5, 61, 0, 0, 924, 161, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 163, 1, 0, 0, 0, 927, 928, 5, 45, 0, 0, 928, 165, 1, 0, 0, 0, 929, 930, 5, 42, 0, 0, 930, 167, 1, 0, 0, 0, 931, 932, 5, 47, 0, 0, 932, 169, 1, 0, 0, 0, 933, 934, 5, 37, 0, 0, 934, 171, 1, 0, 0, 0, 935, 936, 3, 46, 15, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 78, 13, 0, 938, 173, 1, 0, 0, 0, 939, 942, 3, 140, 62, 0, 940, 943, 3, 76, 30, 0, 941, 943, 3, 90, 37, 0, 942, 940, 1, 0, 0, 0, 942, 941, 1, 0, 0, 0, 943, 947, 1, 0, 0, 0, 944, 946, 3, 92, 38, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 957, 1, 0, 0, 0, 949, 947, 1, 0, 0, 0, 950, 952, 3, 140, 62, 0, 951, 953, 3, 74, 29, 0, 952, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 957, 1, 0, 0, 0, 956, 939, 1, 0, 0, 0, 956, 950, 1, 0, 0, 0, 957, 175, 1, 0, 0, 0, 958, 959, 5, 91, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 80, 0, 0, 961, 962, 6, 80, 0, 0, 962, 177, 1, 0, 0, 0, 963, 964, 5, 93, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 81, 12, 0, 966, 967, 6, 81, 12, 0, 967, 179, 1, 0, 0, 0, 968, 972, 3, 76, 30, 0, 969, 971, 3, 92, 38, 0, 970, 969, 1, 0, 0, 0, 971, 974, 1, 0, 0, 0, 972, 970, 1, 0, 0, 0, 972, 973, 1, 0, 0, 0, 973, 985, 1, 0, 0, 0, 974, 972, 1, 0, 0, 0, 975, 978, 3, 90, 37, 0, 976, 978, 3, 84, 34, 0, 977, 975, 1, 0, 0, 0, 977, 976, 1, 0, 0, 0, 978, 980, 1, 0, 0, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 968, 1, 0, 0, 0, 984, 977, 1, 0, 0, 0, 985, 181, 1, 0, 0, 0, 986, 988, 3, 86, 35, 0, 987, 989, 3, 88, 36, 0, 988, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 3, 86, 35, 0, 993, 183, 1, 0, 0, 0, 994, 995, 3, 182, 83, 0, 995, 185, 1, 0, 0, 0, 996, 997, 3, 66, 25, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 85, 11, 0, 999, 187, 1, 0, 0, 0, 1000, 1001, 3, 68, 26, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 86, 11, 0, 1003, 189, 1, 0, 0, 0, 1004, 1005, 3, 70, 27, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 87, 11, 0, 1007, 191, 1, 0, 0, 0, 1008, 1009, 3, 176, 80, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 88, 14, 0, 1011, 1012, 6, 88, 15, 0, 1012, 
193, 1, 0, 0, 0, 1013, 1014, 3, 72, 28, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 89, 16, 0, 1016, 1017, 6, 89, 12, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 11, 0, 1021, 197, 1, 0, 0, 0, 1022, 1023, 3, 66, 25, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 91, 11, 0, 1025, 199, 1, 0, 0, 0, 1026, 1027, 3, 68, 26, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 92, 11, 0, 1029, 201, 1, 0, 0, 0, 1030, 1031, 3, 72, 28, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 93, 16, 0, 1033, 1034, 6, 93, 12, 0, 1034, 203, 1, 0, 0, 0, 1035, 1036, 3, 176, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 94, 14, 0, 1038, 205, 1, 0, 0, 0, 1039, 1040, 3, 178, 81, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 95, 17, 0, 1042, 207, 1, 0, 0, 0, 1043, 1044, 3, 110, 47, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 96, 18, 0, 1046, 209, 1, 0, 0, 0, 1047, 1048, 3, 112, 48, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 97, 19, 0, 1050, 211, 1, 0, 0, 0, 1051, 1052, 3, 106, 45, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 98, 20, 0, 1054, 213, 1, 0, 0, 0, 1055, 1056, 7, 16, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 7, 5, 0, 0, 1058, 1059, 7, 12, 0, 0, 1059, 1060, 7, 0, 0, 0, 1060, 1061, 7, 12, 0, 0, 1061, 1062, 7, 5, 0, 0, 1062, 1063, 7, 12, 0, 0, 1063, 215, 1, 0, 0, 0, 1064, 1068, 8, 33, 0, 0, 1065, 1066, 5, 47, 0, 0, 1066, 1068, 8, 34, 0, 0, 1067, 1064, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 217, 1, 0, 0, 0, 1069, 1071, 3, 216, 100, 0, 1070, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1075, 3, 218, 101, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 102, 21, 0, 1077, 221, 1, 0, 0, 0, 1078, 1079, 3, 94, 39, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 103, 22, 0, 1081, 223, 1, 0, 0, 0, 1082, 1083, 3, 66, 25, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 104, 11, 0, 1085, 225, 1, 0, 0, 0, 1086, 1087, 3, 68, 26, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 105, 11, 0, 1089, 227, 1, 0, 0, 0, 1090, 1091, 3, 70, 27, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 106, 11, 0, 1093, 229, 1, 0, 0, 0, 1094, 1095, 3, 72, 28, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 107, 16, 0, 1097, 1098, 6, 107, 12, 0, 1098, 231, 1, 0, 0, 0, 1099, 1100, 3, 116, 50, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 108, 23, 0, 1102, 233, 1, 0, 0, 0, 1103, 1104, 3, 112, 48, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 109, 19, 0, 1106, 235, 1, 0, 0, 0, 1107, 1108, 4, 110, 8, 0, 1108, 1109, 3, 140, 62, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 110, 24, 0, 1111, 237, 1, 0, 0, 0, 1112, 1113, 4, 111, 9, 0, 1113, 1114, 3, 174, 79, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 25, 0, 1116, 239, 1, 0, 0, 0, 1117, 1122, 3, 76, 30, 0, 1118, 1122, 3, 74, 29, 0, 1119, 1122, 3, 90, 37, 0, 1120, 1122, 3, 166, 75, 0, 1121, 1117, 1, 0, 0, 0, 1121, 1118, 1, 0, 0, 0, 1121, 1119, 1, 0, 0, 0, 1121, 1120, 1, 0, 0, 0, 1122, 241, 1, 0, 0, 0, 1123, 1126, 3, 76, 30, 0, 1124, 1126, 3, 166, 75, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1130, 1, 0, 0, 0, 1127, 1129, 3, 240, 112, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1132, 1, 0, 0, 0, 1130, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1143, 1, 0, 0, 0, 1132, 1130, 1, 0, 0, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 84, 34, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 240, 112, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1143, 1, 0, 0, 0, 1142, 1125, 1, 0, 0, 0, 1142, 1135, 1, 0, 0, 0, 1143, 243, 1, 0, 0, 0, 1144, 1147, 3, 242, 113, 0, 
1145, 1147, 3, 182, 83, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 245, 1, 0, 0, 0, 1150, 1151, 3, 66, 25, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 115, 11, 0, 1153, 247, 1, 0, 0, 0, 1154, 1155, 3, 68, 26, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 116, 11, 0, 1157, 249, 1, 0, 0, 0, 1158, 1159, 3, 70, 27, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 117, 11, 0, 1161, 251, 1, 0, 0, 0, 1162, 1163, 3, 72, 28, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 118, 16, 0, 1165, 1166, 6, 118, 12, 0, 1166, 253, 1, 0, 0, 0, 1167, 1168, 3, 106, 45, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 119, 20, 0, 1170, 255, 1, 0, 0, 0, 1171, 1172, 3, 112, 48, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 120, 19, 0, 1174, 257, 1, 0, 0, 0, 1175, 1176, 3, 116, 50, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 121, 23, 0, 1178, 259, 1, 0, 0, 0, 1179, 1180, 4, 122, 10, 0, 1180, 1181, 3, 140, 62, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 122, 24, 0, 1183, 261, 1, 0, 0, 0, 1184, 1185, 4, 123, 11, 0, 1185, 1186, 3, 174, 79, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 25, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 1191, 7, 2, 0, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 3, 244, 114, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 125, 26, 0, 1195, 267, 1, 0, 0, 0, 1196, 1197, 3, 66, 25, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 126, 11, 0, 1199, 269, 1, 0, 0, 0, 1200, 1201, 3, 68, 26, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 127, 11, 0, 1203, 271, 1, 0, 0, 0, 1204, 1205, 3, 70, 27, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 128, 11, 0, 1207, 273, 1, 0, 0, 0, 1208, 1209, 3, 72, 28, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 129, 16, 0, 1211, 1212, 6, 129, 12, 0, 1212, 275, 1, 0, 0, 0, 1213, 1214, 3, 176, 80, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 130, 14, 0, 1216, 1217, 6, 130, 27, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 7, 7, 0, 0, 1219, 1220, 7, 9, 0, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 131, 28, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 7, 19, 0, 0, 1224, 1225, 7, 1, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 10, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 8, 35, 0, 0, 1231, 283, 1, 0, 0, 0, 1232, 1234, 3, 282, 133, 0, 1233, 1232, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1233, 1, 0, 0, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 3, 110, 47, 0, 1238, 1240, 1, 0, 0, 0, 1239, 1233, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1242, 1, 0, 0, 0, 1241, 1243, 3, 282, 133, 0, 1242, 1241, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 3, 284, 134, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 135, 29, 0, 1249, 287, 1, 0, 0, 0, 1250, 1251, 3, 66, 25, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 136, 11, 0, 1253, 289, 1, 0, 0, 0, 1254, 1255, 3, 68, 26, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 137, 11, 0, 1257, 291, 1, 0, 0, 0, 1258, 1259, 3, 70, 27, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 138, 11, 0, 1261, 293, 1, 0, 0, 0, 1262, 1263, 3, 72, 28, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 139, 16, 0, 1265, 1266, 6, 139, 12, 0, 1266, 1267, 6, 139, 12, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 106, 45, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 20, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 112, 48, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 19, 0, 1275, 299, 1, 0, 0, 0, 1276, 1277, 3, 116, 50, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 142, 23, 0, 1279, 301, 1, 0, 0, 0, 1280, 1281, 3, 280, 132, 0, 1281, 1282, 1, 0, 0, 
0, 1282, 1283, 6, 143, 30, 0, 1283, 303, 1, 0, 0, 0, 1284, 1285, 3, 244, 114, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 144, 26, 0, 1287, 305, 1, 0, 0, 0, 1288, 1289, 3, 184, 84, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 145, 31, 0, 1291, 307, 1, 0, 0, 0, 1292, 1293, 4, 146, 12, 0, 1293, 1294, 3, 140, 62, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 146, 24, 0, 1296, 309, 1, 0, 0, 0, 1297, 1298, 4, 147, 13, 0, 1298, 1299, 3, 174, 79, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 25, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 3, 66, 25, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 148, 11, 0, 1305, 313, 1, 0, 0, 0, 1306, 1307, 3, 68, 26, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 149, 11, 0, 1309, 315, 1, 0, 0, 0, 1310, 1311, 3, 70, 27, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 150, 11, 0, 1313, 317, 1, 0, 0, 0, 1314, 1315, 3, 72, 28, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 151, 16, 0, 1317, 1318, 6, 151, 12, 0, 1318, 319, 1, 0, 0, 0, 1319, 1320, 3, 116, 50, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 152, 23, 0, 1322, 321, 1, 0, 0, 0, 1323, 1324, 4, 153, 14, 0, 1324, 1325, 3, 140, 62, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 24, 0, 1327, 323, 1, 0, 0, 0, 1328, 1329, 4, 154, 15, 0, 1329, 1330, 3, 174, 79, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 25, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 3, 184, 84, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 155, 31, 0, 1336, 327, 1, 0, 0, 0, 1337, 1338, 3, 180, 82, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 156, 32, 0, 1340, 329, 1, 0, 0, 0, 1341, 1342, 3, 66, 25, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 157, 11, 0, 1344, 331, 1, 0, 0, 0, 1345, 1346, 3, 68, 26, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 158, 11, 0, 1348, 333, 1, 0, 0, 0, 1349, 1350, 3, 70, 27, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 159, 11, 0, 1352, 335, 1, 0, 0, 0, 1353, 1354, 3, 72, 28, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 160, 16, 0, 1356, 1357, 6, 160, 12, 0, 1357, 337, 1, 0, 0, 0, 1358, 1359, 7, 1, 0, 0, 1359, 1360, 7, 9, 0, 0, 1360, 1361, 7, 15, 0, 0, 1361, 1362, 7, 7, 0, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 11, 0, 1366, 341, 1, 0, 0, 0, 1367, 1368, 3, 68, 26, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 163, 11, 0, 1370, 343, 1, 0, 0, 0, 1371, 1372, 3, 70, 27, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 164, 11, 0, 1374, 345, 1, 0, 0, 0, 1375, 1376, 3, 178, 81, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 165, 17, 0, 1378, 1379, 6, 165, 12, 0, 1379, 347, 1, 0, 0, 0, 1380, 1381, 3, 110, 47, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 166, 18, 0, 1383, 349, 1, 0, 0, 0, 1384, 1390, 3, 84, 34, 0, 1385, 1390, 3, 74, 29, 0, 1386, 1390, 3, 116, 50, 0, 1387, 1390, 3, 76, 30, 0, 1388, 1390, 3, 90, 37, 0, 1389, 1384, 1, 0, 0, 0, 1389, 1385, 1, 0, 0, 0, 1389, 1386, 1, 0, 0, 0, 1389, 1387, 1, 0, 0, 0, 1389, 1388, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1389, 1, 0, 0, 0, 1391, 1392, 1, 0, 0, 0, 1392, 351, 1, 0, 0, 0, 1393, 1394, 3, 66, 25, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 168, 11, 0, 1396, 353, 1, 0, 0, 0, 1397, 1398, 3, 68, 26, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 169, 11, 0, 1400, 355, 1, 0, 0, 0, 1401, 1402, 3, 70, 27, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 170, 11, 0, 1404, 357, 1, 0, 0, 0, 1405, 1406, 3, 72, 28, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 171, 16, 0, 1408, 1409, 6, 171, 12, 0, 1409, 359, 1, 0, 0, 0, 1410, 1411, 3, 110, 47, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 172, 18, 0, 1413, 361, 1, 0, 0, 0, 1414, 1415, 3, 112, 48, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 173, 19, 0, 1417, 363, 1, 0, 0, 0, 1418, 
1419, 3, 116, 50, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 174, 23, 0, 1421, 365, 1, 0, 0, 0, 1422, 1423, 3, 278, 131, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 175, 33, 0, 1425, 1426, 6, 175, 34, 0, 1426, 367, 1, 0, 0, 0, 1427, 1428, 3, 218, 101, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 176, 21, 0, 1430, 369, 1, 0, 0, 0, 1431, 1432, 3, 94, 39, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 177, 22, 0, 1434, 371, 1, 0, 0, 0, 1435, 1436, 3, 66, 25, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 178, 11, 0, 1438, 373, 1, 0, 0, 0, 1439, 1440, 3, 68, 26, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 179, 11, 0, 1442, 375, 1, 0, 0, 0, 1443, 1444, 3, 70, 27, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 180, 11, 0, 1446, 377, 1, 0, 0, 0, 1447, 1448, 3, 72, 28, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 181, 16, 0, 1450, 1451, 6, 181, 12, 0, 1451, 1452, 6, 181, 12, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 112, 48, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 19, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 116, 50, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 23, 0, 1460, 383, 1, 0, 0, 0, 1461, 1462, 3, 244, 114, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 184, 26, 0, 1464, 385, 1, 0, 0, 0, 1465, 1466, 3, 66, 25, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 185, 11, 0, 1468, 387, 1, 0, 0, 0, 1469, 1470, 3, 68, 26, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 186, 11, 0, 1472, 389, 1, 0, 0, 0, 1473, 1474, 3, 70, 27, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 187, 11, 0, 1476, 391, 1, 0, 0, 0, 1477, 1478, 3, 72, 28, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 188, 16, 0, 1480, 1481, 6, 188, 12, 0, 1481, 393, 1, 0, 0, 0, 1482, 1483, 3, 54, 19, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 189, 35, 0, 1485, 395, 1, 0, 0, 0, 1486, 1487, 3, 264, 124, 0, 1487, 1488, 1, 0, 0, 0, 1488, 1489, 6, 190, 36, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 278, 131, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 33, 0, 1493, 1494, 6, 191, 12, 0, 1494, 1495, 6, 191, 0, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 7, 20, 0, 0, 1497, 1498, 7, 2, 0, 0, 1498, 1499, 7, 1, 0, 0, 1499, 1500, 7, 9, 0, 0, 1500, 1501, 7, 17, 0, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 12, 0, 1503, 1504, 6, 192, 0, 0, 1504, 401, 1, 0, 0, 0, 1505, 1506, 3, 180, 82, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 193, 32, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 184, 84, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 31, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 66, 25, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 11, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 68, 26, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 11, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 70, 27, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 11, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 72, 28, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 16, 0, 1528, 1529, 6, 198, 12, 0, 1529, 413, 1, 0, 0, 0, 1530, 1531, 3, 218, 101, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 199, 21, 0, 1533, 1534, 6, 199, 12, 0, 1534, 1535, 6, 199, 37, 0, 1535, 415, 1, 0, 0, 0, 1536, 1537, 3, 94, 39, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 200, 22, 0, 1539, 1540, 6, 200, 12, 0, 1540, 1541, 6, 200, 37, 0, 1541, 417, 1, 0, 0, 0, 1542, 1543, 3, 66, 25, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 201, 11, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 68, 26, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 11, 0, 1549, 421, 1, 0, 0, 0, 1550, 1551, 3, 70, 27, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 203, 11, 0, 1553, 423, 1, 0, 0, 0, 1554, 1555, 3, 110, 47, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 204, 18, 0, 1557, 1558, 6, 204, 12, 0, 1558, 1559, 6, 204, 
9, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 112, 48, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 19, 0, 1563, 1564, 6, 205, 12, 0, 1564, 1565, 6, 205, 9, 0, 1565, 427, 1, 0, 0, 0, 1566, 1567, 3, 66, 25, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 206, 11, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 68, 26, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 11, 0, 1573, 431, 1, 0, 0, 0, 1574, 1575, 3, 70, 27, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 208, 11, 0, 1577, 433, 1, 0, 0, 0, 1578, 1579, 3, 184, 84, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 209, 12, 0, 1581, 1582, 6, 209, 0, 0, 1582, 1583, 6, 209, 31, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 180, 82, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 12, 0, 1587, 1588, 6, 210, 0, 0, 1588, 1589, 6, 210, 32, 0, 1589, 437, 1, 0, 0, 0, 1590, 1591, 3, 100, 42, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 211, 12, 0, 1593, 1594, 6, 211, 0, 0, 1594, 1595, 6, 211, 38, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 72, 28, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 16, 0, 1599, 1600, 6, 212, 12, 0, 1600, 441, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 655, 665, 669, 672, 681, 683, 694, 713, 718, 727, 734, 739, 741, 752, 760, 763, 765, 770, 775, 781, 788, 793, 799, 802, 810, 814, 942, 947, 954, 956, 972, 977, 982, 984, 990, 1067, 1072, 1121, 1125, 1130, 1135, 1140, 1142, 1146, 1148, 1235, 1239, 1244, 1389, 1391, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 7, 71, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 81, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index f10881fcf0692..f04582e820e28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -8,16 +8,14 @@ * 2.0. 
*/ +import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.RuleContext; -import org.antlr.v4.runtime.RuntimeMetaData; -import org.antlr.v4.runtime.Vocabulary; -import org.antlr.v4.runtime.VocabularyImpl; -import org.antlr.v4.runtime.atn.ATN; -import org.antlr.v4.runtime.atn.ATNDeserializer; -import org.antlr.v4.runtime.atn.LexerATNSimulator; -import org.antlr.v4.runtime.atn.PredictionContextCache; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; +import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape"}) public class EsqlBaseLexer extends LexerConfig { @@ -27,90 +25,96 @@ public class EsqlBaseLexer extends LexerConfig { protected static final PredictionContextCache _sharedContextCache = new PredictionContextCache(); public static final int - DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, - WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20, - LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26, - INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32, - CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40, - LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49, - RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58, - PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, - EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, - FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, - PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, - AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, - ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, - ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, - ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, - MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, - SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106, - SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109, - LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112, - LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115, - METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118, - CLOSING_METRICS_WS=119; + DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, + DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, 
NOT=49, NULL=50, NULLS=51, + OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, + LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, + NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, + UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, + EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, + METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, + FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, + PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, + RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, + ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, + ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, + MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, + SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110, + SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, + LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, + LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, + JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, + METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, + CLOSING_METRICS_WS=128; public static final int - EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, - LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; + EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, JOIN_MODE=13, METRICS_MODE=14, CLOSING_METRICS_MODE=15; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", - "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" + "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", + "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "JOIN_MODE", "METRICS_MODE", + "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { - "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", - "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", - "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", - "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", - "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", - "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", - "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", - "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", - 
"PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", - "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", - "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", - "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", - "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", - "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", - "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", - "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON", - "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", - "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", - "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", - "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", - "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", - "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_PIPE", - "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", + "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", + "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", + "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", 
"GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", + "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", + "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", + "ENRICH_FIELD_PARAM", "ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_PARAM", "MVEXPAND_NAMED_OR_POSITIONAL_PARAM", + "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "SETTING_COLON", + "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", + "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", + "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", + "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_PIPE", "LOOKUP_FIELD_COMMA", + "LOOKUP_FIELD_DOT", "LOOKUP_FIELD_ID_PATTERN", "LOOKUP_FIELD_LINE_COMMENT", + "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "JOIN_PIPE", "JOIN_JOIN", + "JOIN_AS", "JOIN_ON", "USING", "JOIN_UNQUOTED_IDENTIFER", "JOIN_QUOTED_IDENTIFIER", + "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_PIPE", + "METRICS_UNQUOTED_SOURCE", "METRICS_QUOTED_SOURCE", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COLON", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } @@ -118,45 +122,49 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { - null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", - "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", - "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", - "'like'", "'('", "'not'", "'null'", 
"'nulls'", "'or'", "'?'", "'rlike'", - "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'" + null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", + "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", + "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, + "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, null, null, null, null, null, null, "'as'", null, null, null, + "'on'", "'with'", null, null, null, null, null, null, null, null, null, + null, "'info'", null, null, null, null, null, null, null, null, null, + null, null, null, null, "'USING'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { - null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING", - "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", 
"DEV_JOIN_LOOKUP", + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -228,23 +236,31 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); case 18: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 73: - return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex); - case 106: + case 19: + return DEV_JOIN_sempred((RuleContext)_localctx, predIndex); + case 20: + return DEV_JOIN_FULL_sempred((RuleContext)_localctx, predIndex); + case 21: + return DEV_JOIN_LEFT_sempred((RuleContext)_localctx, predIndex); + case 22: + return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); + case 23: + return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); + case 110: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 107: + case 111: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 118: + case 122: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 119: + case 123: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 142: + case 146: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 143: + case 147: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 149: + case 153: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 150: + case 154: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -270,1034 +286,1142 @@ private boolean DEV_METRICS_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 3: return 
this.isDevVersion(); } return true; } - private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_FULL_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 4: return this.isDevVersion(); } return true; } - private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_LEFT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 5: return this.isDevVersion(); } return true; } - private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_RIGHT_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 6: return this.isDevVersion(); } return true; } - private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 7: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 8: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 9: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 10: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 11: return this.isDevVersion(); } return true; } + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 12: + return this.isDevVersion(); + } + return true; + } + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 13: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 14: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 15: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000w\u05cc\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ - "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ - "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ - "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ - "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ - 
"\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ - "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ - "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ - "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ - "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ - "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ - "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ - "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ - "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ - "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ - "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ - "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ - "A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ - "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ - "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ - "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ - "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ - "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ - "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ - "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ - "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ - "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ - "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ - "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ - "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ - "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ - "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ - "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ - "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ - "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ - "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ - "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ - "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+ - "\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002"+ - "\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002"+ - "\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002"+ - "\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002"+ - "\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002"+ - "\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002"+ - "\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0002"+ - "\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007\u00b0\u0002"+ - "\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007\u00b3\u0002"+ - "\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6\u0002"+ - "\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9\u0002"+ - "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ - "\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf\u0002"+ - 
"\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2\u0002"+ - "\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007\u00c5\u0002"+ - "\u00c6\u0007\u00c6\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0004\u0000\u0080\u0641\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\uffff\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + "\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007"+ + "E\u0002F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007"+ + "J\u0002K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007"+ + "O\u0002P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007"+ + "T\u0002U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007"+ + "Y\u0002Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007"+ + "^\u0002_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007"+ + "c\u0002d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007"+ + "h\u0002i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007"+ + "m\u0002n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007"+ + "r\u0002s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007"+ + "w\u0002x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007"+ + "|\u0002}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007"+ + "\u0080\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007"+ + "\u0083\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007"+ + "\u0086\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007"+ + "\u0089\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007"+ + "\u008c\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007"+ + "\u008f\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007"+ + "\u0092\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007"+ + 
"\u0095\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007"+ + "\u0098\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007"+ + "\u009b\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007"+ + "\u009e\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007"+ + "\u00a1\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007"+ + "\u00a4\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007"+ + "\u00a7\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007"+ + "\u00aa\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007"+ + "\u00ad\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007"+ + "\u00b0\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007"+ + "\u00b3\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007"+ + "\u00b6\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007"+ + "\u00b9\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007"+ + "\u00bc\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007"+ + "\u00bf\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007"+ + "\u00c2\u0002\u00c3\u0007\u00c3\u0002\u00c4\u0007\u00c4\u0002\u00c5\u0007"+ + "\u00c5\u0002\u00c6\u0007\u00c6\u0002\u00c7\u0007\u00c7\u0002\u00c8\u0007"+ + "\u00c8\u0002\u00c9\u0007\u00c9\u0002\u00ca\u0007\u00ca\u0002\u00cb\u0007"+ + "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ + "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ + "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ + "\u00d4\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - 
"\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0001\u0013\u0004\u0013\u0244\b\u0013\u000b\u0013"+ - "\f\u0013\u0245\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u024e\b\u0014\n\u0014\f\u0014\u0251\t\u0014\u0001"+ - "\u0014\u0003\u0014\u0254\b\u0014\u0001\u0014\u0003\u0014\u0257\b\u0014"+ - "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0005\u0015\u0260\b\u0015\n\u0015\f\u0015\u0263\t\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004"+ - "\u0016\u026b\b\u0016\u000b\u0016\f\u0016\u026c\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0003\u001d"+ - "\u0282\b\u001d\u0001\u001d\u0004\u001d\u0285\b\u001d\u000b\u001d\f\u001d"+ - "\u0286\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001"+ - " \u0003 \u0290\b \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u0297"+ - "\b\"\u0001#\u0001#\u0001#\u0005#\u029c\b#\n#\f#\u029f\t#\u0001#\u0001"+ - "#\u0001#\u0001#\u0001#\u0001#\u0005#\u02a7\b#\n#\f#\u02aa\t#\u0001#\u0001"+ - "#\u0001#\u0001#\u0001#\u0003#\u02b1\b#\u0001#\u0003#\u02b4\b#\u0003#\u02b6"+ - "\b#\u0001$\u0004$\u02b9\b$\u000b$\f$\u02ba\u0001%\u0004%\u02be\b%\u000b"+ - "%\f%\u02bf\u0001%\u0001%\u0005%\u02c4\b%\n%\f%\u02c7\t%\u0001%\u0001%"+ - "\u0004%\u02cb\b%\u000b%\f%\u02cc\u0001%\u0004%\u02d0\b%\u000b%\f%\u02d1"+ - "\u0001%\u0001%\u0005%\u02d6\b%\n%\f%\u02d9\t%\u0003%\u02db\b%\u0001%\u0001"+ - "%\u0001%\u0001%\u0004%\u02e1\b%\u000b%\f%\u02e2\u0001%\u0001%\u0003%\u02e7"+ - "\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001"+ - "(\u0001(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ - "0\u00010\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001"+ - "@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ - "D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ - "I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001"+ - "K\u0003K\u036a\bK\u0001K\u0005K\u036d\bK\nK\fK\u0370\tK\u0001K\u0001K"+ - 
"\u0004K\u0374\bK\u000bK\fK\u0375\u0003K\u0378\bK\u0001L\u0001L\u0001L"+ - "\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005"+ - "N\u0386\bN\nN\fN\u0389\tN\u0001N\u0001N\u0003N\u038d\bN\u0001N\u0004N"+ - "\u0390\bN\u000bN\fN\u0391\u0003N\u0394\bN\u0001O\u0001O\u0004O\u0398\b"+ - "O\u000bO\fO\u0399\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001"+ - "Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ - "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001U\u0001V\u0001"+ - "V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ - "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ - "]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0003`\u03e7\b`\u0001"+ - "a\u0004a\u03ea\ba\u000ba\fa\u03eb\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001"+ - "h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001"+ - "j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001"+ - "l\u0001l\u0003l\u041d\bl\u0001m\u0001m\u0003m\u0421\bm\u0001m\u0005m\u0424"+ - "\bm\nm\fm\u0427\tm\u0001m\u0001m\u0003m\u042b\bm\u0001m\u0004m\u042e\b"+ - "m\u000bm\fm\u042f\u0003m\u0432\bm\u0001n\u0001n\u0004n\u0436\bn\u000b"+ - "n\fn\u0437\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001"+ - "q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001s\u0001"+ - "s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001"+ - "u\u0001v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001"+ - "}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001~\u0001"+ - "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ - "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ - "\u0081\u0001\u0081\u0001\u0082\u0004\u0082\u048d\b\u0082\u000b\u0082\f"+ - "\u0082\u048e\u0001\u0082\u0001\u0082\u0003\u0082\u0493\b\u0082\u0001\u0082"+ - "\u0004\u0082\u0496\b\u0082\u000b\u0082\f\u0082\u0497\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ - "\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ - "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ - "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ - "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ - "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ - "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ - "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ - "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001"+ - "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ - 
"\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ - "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ - "\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001"+ - "\u009b\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ - "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0004\u00a3\u0529\b\u00a3\u000b"+ - "\u00a3\f\u00a3\u052a\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ + "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ + "\u0018\u028e\b\u0018\u000b\u0018\f\u0018\u028f\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0298\b\u0019"+ + "\n\u0019\f\u0019\u029b\t\u0019\u0001\u0019\u0003\u0019\u029e\b\u0019\u0001"+ + "\u0019\u0003\u0019\u02a1\b\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02aa\b\u001a\n"+ + "\u001a\f\u001a\u02ad\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b5\b\u001b\u000b\u001b\f"+ + "\u001b\u02b6\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02ca\b!\u0001"+ + "!\u0004!\u02cd\b!\u000b!\f!\u02ce\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "$\u0001$\u0001$\u0003$\u02d8\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ + "&\u02df\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02e4\b\'\n\'\f\'\u02e7\t\'"+ + "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u02ef\b\'\n\'"+ + 
"\f\'\u02f2\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u02f9\b"+ + "\'\u0001\'\u0003\'\u02fc\b\'\u0003\'\u02fe\b\'\u0001(\u0004(\u0301\b("+ + "\u000b(\f(\u0302\u0001)\u0004)\u0306\b)\u000b)\f)\u0307\u0001)\u0001)"+ + "\u0005)\u030c\b)\n)\f)\u030f\t)\u0001)\u0001)\u0004)\u0313\b)\u000b)\f"+ + ")\u0314\u0001)\u0004)\u0318\b)\u000b)\f)\u0319\u0001)\u0001)\u0005)\u031e"+ + "\b)\n)\f)\u0321\t)\u0003)\u0323\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ + "\u0329\b)\u000b)\f)\u032a\u0001)\u0001)\u0003)\u032f\b)\u0001*\u0001*"+ + "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0003O\u03af\bO\u0001O\u0005O\u03b2\bO\n"+ + "O\fO\u03b5\tO\u0001O\u0001O\u0004O\u03b9\bO\u000bO\fO\u03ba\u0003O\u03bd"+ + "\bO\u0001P\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0005R\u03cb\bR\nR\fR\u03ce\tR\u0001R\u0001R\u0003R\u03d2"+ + "\bR\u0001R\u0004R\u03d5\bR\u000bR\fR\u03d6\u0003R\u03d9\bR\u0001S\u0001"+ + "S\u0004S\u03dd\bS\u000bS\fS\u03de\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0003"+ + "d\u042c\bd\u0001e\u0004e\u042f\be\u000be\fe\u0430\u0001f\u0001f\u0001"+ + "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ + "k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0003p\u0462\bp\u0001q\u0001q\u0003q\u0466"+ + "\bq\u0001q\u0005q\u0469\bq\nq\fq\u046c\tq\u0001q\u0001q\u0003q\u0470\b"+ + "q\u0001q\u0004q\u0473\bq\u000bq\fq\u0474\u0003q\u0477\bq\u0001r\u0001"+ + "r\u0004r\u047b\br\u000br\fr\u047c\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ + "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + 
"\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0004"+ + "\u0086\u04d2\b\u0086\u000b\u0086\f\u0086\u04d3\u0001\u0086\u0001\u0086"+ + "\u0003\u0086\u04d8\b\u0086\u0001\u0086\u0004\u0086\u04db\b\u0086\u000b"+ + "\u0086\f\u0086\u04dc\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ + "\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001"+ + "\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001"+ - "\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001"+ - 
"\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001"+ - "\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001"+ - "\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c5\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0002"+ - "\u0261\u02a8\u0000\u00c7\u000f\u0001\u0011\u0002\u0013\u0003\u0015\u0004"+ - "\u0017\u0005\u0019\u0006\u001b\u0007\u001d\b\u001f\t!\n#\u000b%\f\'\r"+ - ")\u000e+\u000f-\u0010/\u00111\u00123\u00135\u00147\u00159\u0016;\u0017"+ - "=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000O\u0000"+ - "Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001fa c!e\"g#i$k%"+ - "m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b5\u008d"+ - "6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f?\u00a1"+ - "\u0000\u00a3\u0000\u00a5@\u00a7A\u00a9B\u00abC\u00ad\u0000\u00afD\u00b1"+ - "E\u00b3F\u00b5G\u00b7\u0000\u00b9\u0000\u00bbH\u00bdI\u00bfJ\u00c1\u0000"+ - "\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cdK\u00cf"+ - "\u0000\u00d1L\u00d3\u0000\u00d5\u0000\u00d7M\u00d9N\u00dbO\u00dd\u0000"+ - "\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5\u0000\u00e7\u0000\u00e9\u0000"+ - "\u00ebP\u00edQ\u00efR\u00f1S\u00f3\u0000\u00f5\u0000\u00f7\u0000\u00f9"+ - "\u0000\u00fb\u0000\u00fd\u0000\u00ffT\u0101\u0000\u0103U\u0105V\u0107"+ - "W\u0109\u0000\u010b\u0000\u010dX\u010fY\u0111\u0000\u0113Z\u0115\u0000"+ - "\u0117[\u0119\\\u011b]\u011d\u0000\u011f\u0000\u0121\u0000\u0123\u0000"+ - "\u0125\u0000\u0127\u0000\u0129\u0000\u012b\u0000\u012d\u0000\u012f^\u0131"+ - "_\u0133`\u0135\u0000\u0137\u0000\u0139\u0000\u013b\u0000\u013d\u0000\u013f"+ - "\u0000\u0141a\u0143b\u0145c\u0147\u0000\u0149d\u014be\u014df\u014fg\u0151"+ - "\u0000\u0153\u0000\u0155h\u0157i\u0159j\u015bk\u015d\u0000\u015f\u0000"+ - "\u0161\u0000\u0163\u0000\u0165\u0000\u0167\u0000\u0169\u0000\u016bl\u016d"+ - "m\u016fn\u0171\u0000\u0173\u0000\u0175\u0000\u0177\u0000\u0179o\u017b"+ - "p\u017dq\u017f\u0000\u0181\u0000\u0183\u0000\u0185r\u0187s\u0189t\u018b"+ - "\u0000\u018d\u0000\u018fu\u0191v\u0193w\u0195\u0000\u0197\u0000\u0199"+ - "\u0000\u019b\u0000\u000f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ - "\b\t\n\u000b\f\r\u000e#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SS"+ - "ss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002"+ - "\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000"+ - "VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002"+ - "\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000"+ - "UUuu\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r"+ - "\r \u0001\u000009\u0002\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000"+ - "\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000"+ - "YYyy\u000b\u0000\t\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000"+ - "\t\n\r\r \"#,,//::<<>?\\\\||\u05e8\u0000\u000f\u0001\u0000\u0000\u0000"+ - "\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000"+ - 
"\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000"+ - "\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000"+ - "\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000"+ - "\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%"+ - "\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001"+ - "\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000"+ - "\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u0000"+ - "3\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001"+ - "\u0000\u0000\u0000\u00009\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000"+ - "\u0000\u0000=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001"+ - "U\u0001\u0000\u0000\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001"+ - "\u0000\u0000\u0000\u0001[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000"+ - "\u0000\u0001_\u0001\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001"+ - "c\u0001\u0000\u0000\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001"+ - "\u0000\u0000\u0000\u0001i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000"+ - "\u0000\u0001m\u0001\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001"+ - "q\u0001\u0000\u0000\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001"+ - "\u0000\u0000\u0000\u0001w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000"+ - "\u0000\u0001{\u0001\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001"+ - "\u007f\u0001\u0000\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001"+ - "\u0083\u0001\u0000\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001"+ - "\u0087\u0001\u0000\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001"+ - "\u008b\u0001\u0000\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001"+ - "\u008f\u0001\u0000\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001"+ - "\u0093\u0001\u0000\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001"+ - "\u0097\u0001\u0000\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001"+ - "\u009b\u0001\u0000\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001"+ - "\u009f\u0001\u0000\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001"+ - "\u00a3\u0001\u0000\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001"+ - "\u00a7\u0001\u0000\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001"+ - "\u00ab\u0001\u0000\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001"+ - "\u00b1\u0001\u0000\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0001"+ - "\u00b5\u0001\u0000\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002"+ - "\u00b9\u0001\u0000\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002"+ - "\u00bd\u0001\u0000\u0000\u0000\u0002\u00bf\u0001\u0000\u0000\u0000\u0003"+ - "\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003"+ - "\u00c5\u0001\u0000\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003"+ - "\u00c9\u0001\u0000\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003"+ - "\u00cd\u0001\u0000\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003"+ - "\u00d3\u0001\u0000\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003"+ - "\u00d7\u0001\u0000\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0003"+ - "\u00db\u0001\u0000\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004"+ - "\u00df\u0001\u0000\u0000\u0000\u0004\u00e1\u0001\u0000\u0000\u0000\u0004"+ - "\u00e3\u0001\u0000\u0000\u0000\u0004\u00e5\u0001\u0000\u0000\u0000\u0004"+ - "\u00eb\u0001\u0000\u0000\u0000\u0004\u00ed\u0001\u0000\u0000\u0000\u0004"+ - 
"\u00ef\u0001\u0000\u0000\u0000\u0004\u00f1\u0001\u0000\u0000\u0000\u0005"+ - "\u00f3\u0001\u0000\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005"+ - "\u00f7\u0001\u0000\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005"+ - "\u00fb\u0001\u0000\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0005"+ - "\u00ff\u0001\u0000\u0000\u0000\u0005\u0101\u0001\u0000\u0000\u0000\u0005"+ - "\u0103\u0001\u0000\u0000\u0000\u0005\u0105\u0001\u0000\u0000\u0000\u0005"+ - "\u0107\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000\u0000\u0000\u0006"+ - "\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000\u0000\u0000\u0006"+ - "\u010f\u0001\u0000\u0000\u0000\u0006\u0113\u0001\u0000\u0000\u0000\u0006"+ - "\u0115\u0001\u0000\u0000\u0000\u0006\u0117\u0001\u0000\u0000\u0000\u0006"+ - "\u0119\u0001\u0000\u0000\u0000\u0006\u011b\u0001\u0000\u0000\u0000\u0007"+ - "\u011d\u0001\u0000\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007"+ - "\u0121\u0001\u0000\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007"+ - "\u0125\u0001\u0000\u0000\u0000\u0007\u0127\u0001\u0000\u0000\u0000\u0007"+ - "\u0129\u0001\u0000\u0000\u0000\u0007\u012b\u0001\u0000\u0000\u0000\u0007"+ - "\u012d\u0001\u0000\u0000\u0000\u0007\u012f\u0001\u0000\u0000\u0000\u0007"+ - "\u0131\u0001\u0000\u0000\u0000\u0007\u0133\u0001\u0000\u0000\u0000\b\u0135"+ - "\u0001\u0000\u0000\u0000\b\u0137\u0001\u0000\u0000\u0000\b\u0139\u0001"+ - "\u0000\u0000\u0000\b\u013b\u0001\u0000\u0000\u0000\b\u013d\u0001\u0000"+ - "\u0000\u0000\b\u013f\u0001\u0000\u0000\u0000\b\u0141\u0001\u0000\u0000"+ - "\u0000\b\u0143\u0001\u0000\u0000\u0000\b\u0145\u0001\u0000\u0000\u0000"+ - "\t\u0147\u0001\u0000\u0000\u0000\t\u0149\u0001\u0000\u0000\u0000\t\u014b"+ - "\u0001\u0000\u0000\u0000\t\u014d\u0001\u0000\u0000\u0000\t\u014f\u0001"+ - "\u0000\u0000\u0000\n\u0151\u0001\u0000\u0000\u0000\n\u0153\u0001\u0000"+ - "\u0000\u0000\n\u0155\u0001\u0000\u0000\u0000\n\u0157\u0001\u0000\u0000"+ - "\u0000\n\u0159\u0001\u0000\u0000\u0000\n\u015b\u0001\u0000\u0000\u0000"+ - "\u000b\u015d\u0001\u0000\u0000\u0000\u000b\u015f\u0001\u0000\u0000\u0000"+ - "\u000b\u0161\u0001\u0000\u0000\u0000\u000b\u0163\u0001\u0000\u0000\u0000"+ - "\u000b\u0165\u0001\u0000\u0000\u0000\u000b\u0167\u0001\u0000\u0000\u0000"+ - "\u000b\u0169\u0001\u0000\u0000\u0000\u000b\u016b\u0001\u0000\u0000\u0000"+ - "\u000b\u016d\u0001\u0000\u0000\u0000\u000b\u016f\u0001\u0000\u0000\u0000"+ - "\f\u0171\u0001\u0000\u0000\u0000\f\u0173\u0001\u0000\u0000\u0000\f\u0175"+ - "\u0001\u0000\u0000\u0000\f\u0177\u0001\u0000\u0000\u0000\f\u0179\u0001"+ - "\u0000\u0000\u0000\f\u017b\u0001\u0000\u0000\u0000\f\u017d\u0001\u0000"+ - "\u0000\u0000\r\u017f\u0001\u0000\u0000\u0000\r\u0181\u0001\u0000\u0000"+ - "\u0000\r\u0183\u0001\u0000\u0000\u0000\r\u0185\u0001\u0000\u0000\u0000"+ - "\r\u0187\u0001\u0000\u0000\u0000\r\u0189\u0001\u0000\u0000\u0000\u000e"+ - "\u018b\u0001\u0000\u0000\u0000\u000e\u018d\u0001\u0000\u0000\u0000\u000e"+ - "\u018f\u0001\u0000\u0000\u0000\u000e\u0191\u0001\u0000\u0000\u0000\u000e"+ - "\u0193\u0001\u0000\u0000\u0000\u000e\u0195\u0001\u0000\u0000\u0000\u000e"+ - "\u0197\u0001\u0000\u0000\u0000\u000e\u0199\u0001\u0000\u0000\u0000\u000e"+ - "\u019b\u0001\u0000\u0000\u0000\u000f\u019d\u0001\u0000\u0000\u0000\u0011"+ - "\u01a7\u0001\u0000\u0000\u0000\u0013\u01ae\u0001\u0000\u0000\u0000\u0015"+ - "\u01b7\u0001\u0000\u0000\u0000\u0017\u01be\u0001\u0000\u0000\u0000\u0019"+ - "\u01c8\u0001\u0000\u0000\u0000\u001b\u01cf\u0001\u0000\u0000\u0000\u001d"+ - 
"\u01d6\u0001\u0000\u0000\u0000\u001f\u01dd\u0001\u0000\u0000\u0000!\u01e5"+ - "\u0001\u0000\u0000\u0000#\u01f1\u0001\u0000\u0000\u0000%\u01fa\u0001\u0000"+ - "\u0000\u0000\'\u0200\u0001\u0000\u0000\u0000)\u0207\u0001\u0000\u0000"+ - "\u0000+\u020e\u0001\u0000\u0000\u0000-\u0216\u0001\u0000\u0000\u0000/"+ - "\u021e\u0001\u0000\u0000\u00001\u022d\u0001\u0000\u0000\u00003\u0237\u0001"+ - "\u0000\u0000\u00005\u0243\u0001\u0000\u0000\u00007\u0249\u0001\u0000\u0000"+ - "\u00009\u025a\u0001\u0000\u0000\u0000;\u026a\u0001\u0000\u0000\u0000="+ - "\u0270\u0001\u0000\u0000\u0000?\u0272\u0001\u0000\u0000\u0000A\u0276\u0001"+ - "\u0000\u0000\u0000C\u0278\u0001\u0000\u0000\u0000E\u027a\u0001\u0000\u0000"+ - "\u0000G\u027d\u0001\u0000\u0000\u0000I\u027f\u0001\u0000\u0000\u0000K"+ - "\u0288\u0001\u0000\u0000\u0000M\u028a\u0001\u0000\u0000\u0000O\u028f\u0001"+ - "\u0000\u0000\u0000Q\u0291\u0001\u0000\u0000\u0000S\u0296\u0001\u0000\u0000"+ - "\u0000U\u02b5\u0001\u0000\u0000\u0000W\u02b8\u0001\u0000\u0000\u0000Y"+ - "\u02e6\u0001\u0000\u0000\u0000[\u02e8\u0001\u0000\u0000\u0000]\u02eb\u0001"+ - "\u0000\u0000\u0000_\u02ef\u0001\u0000\u0000\u0000a\u02f3\u0001\u0000\u0000"+ - "\u0000c\u02f5\u0001\u0000\u0000\u0000e\u02f8\u0001\u0000\u0000\u0000g"+ - "\u02fa\u0001\u0000\u0000\u0000i\u02ff\u0001\u0000\u0000\u0000k\u0301\u0001"+ - "\u0000\u0000\u0000m\u0307\u0001\u0000\u0000\u0000o\u030d\u0001\u0000\u0000"+ - "\u0000q\u0310\u0001\u0000\u0000\u0000s\u0313\u0001\u0000\u0000\u0000u"+ - "\u0318\u0001\u0000\u0000\u0000w\u031d\u0001\u0000\u0000\u0000y\u031f\u0001"+ - "\u0000\u0000\u0000{\u0323\u0001\u0000\u0000\u0000}\u0328\u0001\u0000\u0000"+ - "\u0000\u007f\u032e\u0001\u0000\u0000\u0000\u0081\u0331\u0001\u0000\u0000"+ - "\u0000\u0083\u0333\u0001\u0000\u0000\u0000\u0085\u0339\u0001\u0000\u0000"+ - "\u0000\u0087\u033b\u0001\u0000\u0000\u0000\u0089\u0340\u0001\u0000\u0000"+ - "\u0000\u008b\u0343\u0001\u0000\u0000\u0000\u008d\u0346\u0001\u0000\u0000"+ - "\u0000\u008f\u0349\u0001\u0000\u0000\u0000\u0091\u034b\u0001\u0000\u0000"+ - "\u0000\u0093\u034e\u0001\u0000\u0000\u0000\u0095\u0350\u0001\u0000\u0000"+ - "\u0000\u0097\u0353\u0001\u0000\u0000\u0000\u0099\u0355\u0001\u0000\u0000"+ - "\u0000\u009b\u0357\u0001\u0000\u0000\u0000\u009d\u0359\u0001\u0000\u0000"+ - "\u0000\u009f\u035b\u0001\u0000\u0000\u0000\u00a1\u035d\u0001\u0000\u0000"+ - "\u0000\u00a3\u0362\u0001\u0000\u0000\u0000\u00a5\u0377\u0001\u0000\u0000"+ - "\u0000\u00a7\u0379\u0001\u0000\u0000\u0000\u00a9\u037e\u0001\u0000\u0000"+ - "\u0000\u00ab\u0393\u0001\u0000\u0000\u0000\u00ad\u0395\u0001\u0000\u0000"+ - "\u0000\u00af\u039d\u0001\u0000\u0000\u0000\u00b1\u039f\u0001\u0000\u0000"+ - "\u0000\u00b3\u03a3\u0001\u0000\u0000\u0000\u00b5\u03a7\u0001\u0000\u0000"+ - "\u0000\u00b7\u03ab\u0001\u0000\u0000\u0000\u00b9\u03b0\u0001\u0000\u0000"+ - "\u0000\u00bb\u03b5\u0001\u0000\u0000\u0000\u00bd\u03b9\u0001\u0000\u0000"+ - "\u0000\u00bf\u03bd\u0001\u0000\u0000\u0000\u00c1\u03c1\u0001\u0000\u0000"+ - "\u0000\u00c3\u03c6\u0001\u0000\u0000\u0000\u00c5\u03ca\u0001\u0000\u0000"+ - "\u0000\u00c7\u03ce\u0001\u0000\u0000\u0000\u00c9\u03d2\u0001\u0000\u0000"+ - "\u0000\u00cb\u03d6\u0001\u0000\u0000\u0000\u00cd\u03da\u0001\u0000\u0000"+ - "\u0000\u00cf\u03e6\u0001\u0000\u0000\u0000\u00d1\u03e9\u0001\u0000\u0000"+ - "\u0000\u00d3\u03ed\u0001\u0000\u0000\u0000\u00d5\u03f1\u0001\u0000\u0000"+ - "\u0000\u00d7\u03f5\u0001\u0000\u0000\u0000\u00d9\u03f9\u0001\u0000\u0000"+ - "\u0000\u00db\u03fd\u0001\u0000\u0000\u0000\u00dd\u0401\u0001\u0000\u0000"+ - 
"\u0000\u00df\u0406\u0001\u0000\u0000\u0000\u00e1\u040a\u0001\u0000\u0000"+ - "\u0000\u00e3\u040e\u0001\u0000\u0000\u0000\u00e5\u0413\u0001\u0000\u0000"+ - "\u0000\u00e7\u041c\u0001\u0000\u0000\u0000\u00e9\u0431\u0001\u0000\u0000"+ - "\u0000\u00eb\u0435\u0001\u0000\u0000\u0000\u00ed\u0439\u0001\u0000\u0000"+ - "\u0000\u00ef\u043d\u0001\u0000\u0000\u0000\u00f1\u0441\u0001\u0000\u0000"+ - "\u0000\u00f3\u0445\u0001\u0000\u0000\u0000\u00f5\u044a\u0001\u0000\u0000"+ - "\u0000\u00f7\u044e\u0001\u0000\u0000\u0000\u00f9\u0452\u0001\u0000\u0000"+ - "\u0000\u00fb\u0456\u0001\u0000\u0000\u0000\u00fd\u045b\u0001\u0000\u0000"+ - "\u0000\u00ff\u0460\u0001\u0000\u0000\u0000\u0101\u0463\u0001\u0000\u0000"+ - "\u0000\u0103\u0467\u0001\u0000\u0000\u0000\u0105\u046b\u0001\u0000\u0000"+ - "\u0000\u0107\u046f\u0001\u0000\u0000\u0000\u0109\u0473\u0001\u0000\u0000"+ - "\u0000\u010b\u0478\u0001\u0000\u0000\u0000\u010d\u047d\u0001\u0000\u0000"+ - "\u0000\u010f\u0482\u0001\u0000\u0000\u0000\u0111\u0489\u0001\u0000\u0000"+ - "\u0000\u0113\u0492\u0001\u0000\u0000\u0000\u0115\u0499\u0001\u0000\u0000"+ - "\u0000\u0117\u049d\u0001\u0000\u0000\u0000\u0119\u04a1\u0001\u0000\u0000"+ - "\u0000\u011b\u04a5\u0001\u0000\u0000\u0000\u011d\u04a9\u0001\u0000\u0000"+ - "\u0000\u011f\u04af\u0001\u0000\u0000\u0000\u0121\u04b3\u0001\u0000\u0000"+ - "\u0000\u0123\u04b7\u0001\u0000\u0000\u0000\u0125\u04bb\u0001\u0000\u0000"+ - "\u0000\u0127\u04bf\u0001\u0000\u0000\u0000\u0129\u04c3\u0001\u0000\u0000"+ - "\u0000\u012b\u04c7\u0001\u0000\u0000\u0000\u012d\u04cc\u0001\u0000\u0000"+ - "\u0000\u012f\u04d1\u0001\u0000\u0000\u0000\u0131\u04d5\u0001\u0000\u0000"+ - "\u0000\u0133\u04d9\u0001\u0000\u0000\u0000\u0135\u04dd\u0001\u0000\u0000"+ - "\u0000\u0137\u04e2\u0001\u0000\u0000\u0000\u0139\u04e6\u0001\u0000\u0000"+ - "\u0000\u013b\u04eb\u0001\u0000\u0000\u0000\u013d\u04f0\u0001\u0000\u0000"+ - "\u0000\u013f\u04f4\u0001\u0000\u0000\u0000\u0141\u04f8\u0001\u0000\u0000"+ - "\u0000\u0143\u04fc\u0001\u0000\u0000\u0000\u0145\u0500\u0001\u0000\u0000"+ - "\u0000\u0147\u0504\u0001\u0000\u0000\u0000\u0149\u0509\u0001\u0000\u0000"+ - "\u0000\u014b\u050e\u0001\u0000\u0000\u0000\u014d\u0512\u0001\u0000\u0000"+ - "\u0000\u014f\u0516\u0001\u0000\u0000\u0000\u0151\u051a\u0001\u0000\u0000"+ - "\u0000\u0153\u051f\u0001\u0000\u0000\u0000\u0155\u0528\u0001\u0000\u0000"+ - "\u0000\u0157\u052c\u0001\u0000\u0000\u0000\u0159\u0530\u0001\u0000\u0000"+ - "\u0000\u015b\u0534\u0001\u0000\u0000\u0000\u015d\u0538\u0001\u0000\u0000"+ - "\u0000\u015f\u053d\u0001\u0000\u0000\u0000\u0161\u0541\u0001\u0000\u0000"+ - "\u0000\u0163\u0545\u0001\u0000\u0000\u0000\u0165\u0549\u0001\u0000\u0000"+ - "\u0000\u0167\u054e\u0001\u0000\u0000\u0000\u0169\u0552\u0001\u0000\u0000"+ - "\u0000\u016b\u0556\u0001\u0000\u0000\u0000\u016d\u055a\u0001\u0000\u0000"+ - "\u0000\u016f\u055e\u0001\u0000\u0000\u0000\u0171\u0562\u0001\u0000\u0000"+ - "\u0000\u0173\u0568\u0001\u0000\u0000\u0000\u0175\u056c\u0001\u0000\u0000"+ - "\u0000\u0177\u0570\u0001\u0000\u0000\u0000\u0179\u0574\u0001\u0000\u0000"+ - "\u0000\u017b\u0578\u0001\u0000\u0000\u0000\u017d\u057c\u0001\u0000\u0000"+ - "\u0000\u017f\u0580\u0001\u0000\u0000\u0000\u0181\u0585\u0001\u0000\u0000"+ - "\u0000\u0183\u058b\u0001\u0000\u0000\u0000\u0185\u0591\u0001\u0000\u0000"+ - "\u0000\u0187\u0595\u0001\u0000\u0000\u0000\u0189\u0599\u0001\u0000\u0000"+ - "\u0000\u018b\u059d\u0001\u0000\u0000\u0000\u018d\u05a3\u0001\u0000\u0000"+ - "\u0000\u018f\u05a9\u0001\u0000\u0000\u0000\u0191\u05ad\u0001\u0000\u0000"+ - 
"\u0000\u0193\u05b1\u0001\u0000\u0000\u0000\u0195\u05b5\u0001\u0000\u0000"+ - "\u0000\u0197\u05bb\u0001\u0000\u0000\u0000\u0199\u05c1\u0001\u0000\u0000"+ - "\u0000\u019b\u05c7\u0001\u0000\u0000\u0000\u019d\u019e\u0007\u0000\u0000"+ - "\u0000\u019e\u019f\u0007\u0001\u0000\u0000\u019f\u01a0\u0007\u0002\u0000"+ - "\u0000\u01a0\u01a1\u0007\u0002\u0000\u0000\u01a1\u01a2\u0007\u0003\u0000"+ - "\u0000\u01a2\u01a3\u0007\u0004\u0000\u0000\u01a3\u01a4\u0007\u0005\u0000"+ - "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6\u0006\u0000\u0000"+ - "\u0000\u01a6\u0010\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0000\u0000"+ - "\u0000\u01a8\u01a9\u0007\u0006\u0000\u0000\u01a9\u01aa\u0007\u0007\u0000"+ - "\u0000\u01aa\u01ab\u0007\b\u0000\u0000\u01ab\u01ac\u0001\u0000\u0000\u0000"+ - "\u01ac\u01ad\u0006\u0001\u0001\u0000\u01ad\u0012\u0001\u0000\u0000\u0000"+ - "\u01ae\u01af\u0007\u0003\u0000\u0000\u01af\u01b0\u0007\t\u0000\u0000\u01b0"+ - "\u01b1\u0007\u0006\u0000\u0000\u01b1\u01b2\u0007\u0001\u0000\u0000\u01b2"+ - "\u01b3\u0007\u0004\u0000\u0000\u01b3\u01b4\u0007\n\u0000\u0000\u01b4\u01b5"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b6\u0006\u0002\u0002\u0000\u01b6\u0014"+ - "\u0001\u0000\u0000\u0000\u01b7\u01b8\u0007\u0003\u0000\u0000\u01b8\u01b9"+ - "\u0007\u000b\u0000\u0000\u01b9\u01ba\u0007\f\u0000\u0000\u01ba\u01bb\u0007"+ - "\r\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\u0003"+ - "\u0000\u0000\u01bd\u0016\u0001\u0000\u0000\u0000\u01be\u01bf\u0007\u0003"+ - "\u0000\u0000\u01bf\u01c0\u0007\u000e\u0000\u0000\u01c0\u01c1\u0007\b\u0000"+ - "\u0000\u01c1\u01c2\u0007\r\u0000\u0000\u01c2\u01c3\u0007\f\u0000\u0000"+ - "\u01c3\u01c4\u0007\u0001\u0000\u0000\u01c4\u01c5\u0007\t\u0000\u0000\u01c5"+ - "\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u0004\u0003\u0000\u01c7"+ - "\u0018\u0001\u0000\u0000\u0000\u01c8\u01c9\u0007\u000f\u0000\u0000\u01c9"+ - "\u01ca\u0007\u0006\u0000\u0000\u01ca\u01cb\u0007\u0007\u0000\u0000\u01cb"+ - "\u01cc\u0007\u0010\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd"+ - "\u01ce\u0006\u0005\u0004\u0000\u01ce\u001a\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0007\u0011\u0000\u0000\u01d0\u01d1\u0007\u0006\u0000\u0000\u01d1"+ - "\u01d2\u0007\u0007\u0000\u0000\u01d2\u01d3\u0007\u0012\u0000\u0000\u01d3"+ - "\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0006\u0000\u0000\u01d5"+ - "\u001c\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0012\u0000\u0000\u01d7"+ - "\u01d8\u0007\u0003\u0000\u0000\u01d8\u01d9\u0007\u0003\u0000\u0000\u01d9"+ - "\u01da\u0007\b\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0006\u0007\u0001\u0000\u01dc\u001e\u0001\u0000\u0000\u0000\u01dd\u01de"+ - "\u0007\r\u0000\u0000\u01de\u01df\u0007\u0001\u0000\u0000\u01df\u01e0\u0007"+ - "\u0010\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2\u0007"+ - "\u0005\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ - "\b\u0000\u0000\u01e4 \u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0010"+ - "\u0000\u0000\u01e6\u01e7\u0007\u000b\u0000\u0000\u01e7\u01e8\u0005_\u0000"+ - "\u0000\u01e8\u01e9\u0007\u0003\u0000\u0000\u01e9\u01ea\u0007\u000e\u0000"+ - "\u0000\u01ea\u01eb\u0007\b\u0000\u0000\u01eb\u01ec\u0007\f\u0000\u0000"+ - "\u01ec\u01ed\u0007\t\u0000\u0000\u01ed\u01ee\u0007\u0000\u0000\u0000\u01ee"+ - "\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f0\u0006\t\u0005\u0000\u01f0\""+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0007\u0006\u0000\u0000\u01f2\u01f3"+ - "\u0007\u0003\u0000\u0000\u01f3\u01f4\u0007\t\u0000\u0000\u01f4\u01f5\u0007"+ - 
"\f\u0000\u0000\u01f5\u01f6\u0007\u0010\u0000\u0000\u01f6\u01f7\u0007\u0003"+ - "\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\n\u0006"+ - "\u0000\u01f9$\u0001\u0000\u0000\u0000\u01fa\u01fb\u0007\u0006\u0000\u0000"+ - "\u01fb\u01fc\u0007\u0007\u0000\u0000\u01fc\u01fd\u0007\u0013\u0000\u0000"+ - "\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe\u01ff\u0006\u000b\u0000\u0000"+ - "\u01ff&\u0001\u0000\u0000\u0000\u0200\u0201\u0007\u0002\u0000\u0000\u0201"+ - "\u0202\u0007\n\u0000\u0000\u0202\u0203\u0007\u0007\u0000\u0000\u0203\u0204"+ - "\u0007\u0013\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ - "\u0006\f\u0007\u0000\u0206(\u0001\u0000\u0000\u0000\u0207\u0208\u0007"+ - "\u0002\u0000\u0000\u0208\u0209\u0007\u0007\u0000\u0000\u0209\u020a\u0007"+ - "\u0006\u0000\u0000\u020a\u020b\u0007\u0005\u0000\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\r\u0000\u0000\u020d*\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0007\u0002\u0000\u0000\u020f\u0210\u0007\u0005"+ - "\u0000\u0000\u0210\u0211\u0007\f\u0000\u0000\u0211\u0212\u0007\u0005\u0000"+ - "\u0000\u0212\u0213\u0007\u0002\u0000\u0000\u0213\u0214\u0001\u0000\u0000"+ - "\u0000\u0214\u0215\u0006\u000e\u0000\u0000\u0215,\u0001\u0000\u0000\u0000"+ - "\u0216\u0217\u0007\u0013\u0000\u0000\u0217\u0218\u0007\n\u0000\u0000\u0218"+ - "\u0219\u0007\u0003\u0000\u0000\u0219\u021a\u0007\u0006\u0000\u0000\u021a"+ - "\u021b\u0007\u0003\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u0006\u000f\u0000\u0000\u021d.\u0001\u0000\u0000\u0000\u021e\u021f"+ - "\u0004\u0010\u0000\u0000\u021f\u0220\u0007\u0001\u0000\u0000\u0220\u0221"+ - "\u0007\t\u0000\u0000\u0221\u0222\u0007\r\u0000\u0000\u0222\u0223\u0007"+ - "\u0001\u0000\u0000\u0223\u0224\u0007\t\u0000\u0000\u0224\u0225\u0007\u0003"+ - "\u0000\u0000\u0225\u0226\u0007\u0002\u0000\u0000\u0226\u0227\u0007\u0005"+ - "\u0000\u0000\u0227\u0228\u0007\f\u0000\u0000\u0228\u0229\u0007\u0005\u0000"+ - "\u0000\u0229\u022a\u0007\u0002\u0000\u0000\u022a\u022b\u0001\u0000\u0000"+ - "\u0000\u022b\u022c\u0006\u0010\u0000\u0000\u022c0\u0001\u0000\u0000\u0000"+ - "\u022d\u022e\u0004\u0011\u0001\u0000\u022e\u022f\u0007\r\u0000\u0000\u022f"+ - "\u0230\u0007\u0007\u0000\u0000\u0230\u0231\u0007\u0007\u0000\u0000\u0231"+ - "\u0232\u0007\u0012\u0000\u0000\u0232\u0233\u0007\u0014\u0000\u0000\u0233"+ - "\u0234\u0007\b\u0000\u0000\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236"+ - "\u0006\u0011\b\u0000\u02362\u0001\u0000\u0000\u0000\u0237\u0238\u0004"+ - "\u0012\u0002\u0000\u0238\u0239\u0007\u0010\u0000\u0000\u0239\u023a\u0007"+ - "\u0003\u0000\u0000\u023a\u023b\u0007\u0005\u0000\u0000\u023b\u023c\u0007"+ - "\u0006\u0000\u0000\u023c\u023d\u0007\u0001\u0000\u0000\u023d\u023e\u0007"+ - "\u0004\u0000\u0000\u023e\u023f\u0007\u0002\u0000\u0000\u023f\u0240\u0001"+ - "\u0000\u0000\u0000\u0240\u0241\u0006\u0012\t\u0000\u02414\u0001\u0000"+ - "\u0000\u0000\u0242\u0244\b\u0015\u0000\u0000\u0243\u0242\u0001\u0000\u0000"+ - "\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000"+ - "\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0006\u0013\u0000\u0000\u02486\u0001\u0000\u0000\u0000"+ - "\u0249\u024a\u0005/\u0000\u0000\u024a\u024b\u0005/\u0000\u0000\u024b\u024f"+ - "\u0001\u0000\u0000\u0000\u024c\u024e\b\u0016\u0000\u0000\u024d\u024c\u0001"+ - "\u0000\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001"+ - "\u0000\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0253\u0001"+ - 
"\u0000\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0254\u0005"+ - "\r\u0000\u0000\u0253\u0252\u0001\u0000\u0000\u0000\u0253\u0254\u0001\u0000"+ - "\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0257\u0005\n\u0000"+ - "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000\u0000"+ - "\u0000\u0257\u0258\u0001\u0000\u0000\u0000\u0258\u0259\u0006\u0014\n\u0000"+ - "\u02598\u0001\u0000\u0000\u0000\u025a\u025b\u0005/\u0000\u0000\u025b\u025c"+ - "\u0005*\u0000\u0000\u025c\u0261\u0001\u0000\u0000\u0000\u025d\u0260\u0003"+ - "9\u0015\u0000\u025e\u0260\t\u0000\u0000\u0000\u025f\u025d\u0001\u0000"+ - "\u0000\u0000\u025f\u025e\u0001\u0000\u0000\u0000\u0260\u0263\u0001\u0000"+ - "\u0000\u0000\u0261\u0262\u0001\u0000\u0000\u0000\u0261\u025f\u0001\u0000"+ - "\u0000\u0000\u0262\u0264\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0005*\u0000\u0000\u0265\u0266\u0005/\u0000\u0000"+ - "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0015\n\u0000\u0268"+ - ":\u0001\u0000\u0000\u0000\u0269\u026b\u0007\u0017\u0000\u0000\u026a\u0269"+ - "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d\u026e"+ - "\u0001\u0000\u0000\u0000\u026e\u026f\u0006\u0016\n\u0000\u026f<\u0001"+ - "\u0000\u0000\u0000\u0270\u0271\u0005:\u0000\u0000\u0271>\u0001\u0000\u0000"+ - "\u0000\u0272\u0273\u0005|\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000"+ - "\u0274\u0275\u0006\u0018\u000b\u0000\u0275@\u0001\u0000\u0000\u0000\u0276"+ - "\u0277\u0007\u0018\u0000\u0000\u0277B\u0001\u0000\u0000\u0000\u0278\u0279"+ - "\u0007\u0019\u0000\u0000\u0279D\u0001\u0000\u0000\u0000\u027a\u027b\u0005"+ - "\\\u0000\u0000\u027b\u027c\u0007\u001a\u0000\u0000\u027cF\u0001\u0000"+ - "\u0000\u0000\u027d\u027e\b\u001b\u0000\u0000\u027eH\u0001\u0000\u0000"+ - "\u0000\u027f\u0281\u0007\u0003\u0000\u0000\u0280\u0282\u0007\u001c\u0000"+ - "\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0281\u0282\u0001\u0000\u0000"+ - "\u0000\u0282\u0284\u0001\u0000\u0000\u0000\u0283\u0285\u0003A\u0019\u0000"+ - "\u0284\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000"+ - "\u0286\u0284\u0001\u0000\u0000\u0000\u0286\u0287\u0001\u0000\u0000\u0000"+ - "\u0287J\u0001\u0000\u0000\u0000\u0288\u0289\u0005@\u0000\u0000\u0289L"+ - "\u0001\u0000\u0000\u0000\u028a\u028b\u0005`\u0000\u0000\u028bN\u0001\u0000"+ - "\u0000\u0000\u028c\u0290\b\u001d\u0000\u0000\u028d\u028e\u0005`\u0000"+ - "\u0000\u028e\u0290\u0005`\u0000\u0000\u028f\u028c\u0001\u0000\u0000\u0000"+ - "\u028f\u028d\u0001\u0000\u0000\u0000\u0290P\u0001\u0000\u0000\u0000\u0291"+ - "\u0292\u0005_\u0000\u0000\u0292R\u0001\u0000\u0000\u0000\u0293\u0297\u0003"+ - "C\u001a\u0000\u0294\u0297\u0003A\u0019\u0000\u0295\u0297\u0003Q!\u0000"+ - "\u0296\u0293\u0001\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000"+ - "\u0296\u0295\u0001\u0000\u0000\u0000\u0297T\u0001\u0000\u0000\u0000\u0298"+ - "\u029d\u0005\"\u0000\u0000\u0299\u029c\u0003E\u001b\u0000\u029a\u029c"+ - "\u0003G\u001c\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b\u029a\u0001"+ - "\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001"+ - "\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a0\u0001"+ - "\u0000\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02b6\u0005"+ - "\"\u0000\u0000\u02a1\u02a2\u0005\"\u0000\u0000\u02a2\u02a3\u0005\"\u0000"+ - "\u0000\u02a3\u02a4\u0005\"\u0000\u0000\u02a4\u02a8\u0001\u0000\u0000\u0000"+ - 
"\u02a5\u02a7\b\u0016\u0000\u0000\u02a6\u02a5\u0001\u0000\u0000\u0000\u02a7"+ - "\u02aa\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000\u0000\u02a8"+ - "\u02a6\u0001\u0000\u0000\u0000\u02a9\u02ab\u0001\u0000\u0000\u0000\u02aa"+ - "\u02a8\u0001\u0000\u0000\u0000\u02ab\u02ac\u0005\"\u0000\u0000\u02ac\u02ad"+ - "\u0005\"\u0000\u0000\u02ad\u02ae\u0005\"\u0000\u0000\u02ae\u02b0\u0001"+ - "\u0000\u0000\u0000\u02af\u02b1\u0005\"\u0000\u0000\u02b0\u02af\u0001\u0000"+ - "\u0000\u0000\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b3\u0001\u0000"+ - "\u0000\u0000\u02b2\u02b4\u0005\"\u0000\u0000\u02b3\u02b2\u0001\u0000\u0000"+ - "\u0000\u02b3\u02b4\u0001\u0000\u0000\u0000\u02b4\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b5\u0298\u0001\u0000\u0000\u0000\u02b5\u02a1\u0001\u0000\u0000"+ - "\u0000\u02b6V\u0001\u0000\u0000\u0000\u02b7\u02b9\u0003A\u0019\u0000\u02b8"+ - "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ - "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ - "X\u0001\u0000\u0000\u0000\u02bc\u02be\u0003A\u0019\u0000\u02bd\u02bc\u0001"+ - "\u0000\u0000\u0000\u02be\u02bf\u0001\u0000\u0000\u0000\u02bf\u02bd\u0001"+ - "\u0000\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0001"+ - "\u0000\u0000\u0000\u02c1\u02c5\u0003i-\u0000\u02c2\u02c4\u0003A\u0019"+ - "\u0000\u02c3\u02c2\u0001\u0000\u0000\u0000\u02c4\u02c7\u0001\u0000\u0000"+ - "\u0000\u02c5\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001\u0000\u0000"+ - "\u0000\u02c6\u02e7\u0001\u0000\u0000\u0000\u02c7\u02c5\u0001\u0000\u0000"+ - "\u0000\u02c8\u02ca\u0003i-\u0000\u02c9\u02cb\u0003A\u0019\u0000\u02ca"+ - "\u02c9\u0001\u0000\u0000\u0000\u02cb\u02cc\u0001\u0000\u0000\u0000\u02cc"+ - "\u02ca\u0001\u0000\u0000\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd"+ - "\u02e7\u0001\u0000\u0000\u0000\u02ce\u02d0\u0003A\u0019\u0000\u02cf\u02ce"+ - "\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000\u0000\u02d1\u02cf"+ - "\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02da"+ - "\u0001\u0000\u0000\u0000\u02d3\u02d7\u0003i-\u0000\u02d4\u02d6\u0003A"+ - "\u0019\u0000\u02d5\u02d4\u0001\u0000\u0000\u0000\u02d6\u02d9\u0001\u0000"+ - "\u0000\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000"+ - "\u0000\u0000\u02d8\u02db\u0001\u0000\u0000\u0000\u02d9\u02d7\u0001\u0000"+ - "\u0000\u0000\u02da\u02d3\u0001\u0000\u0000\u0000\u02da\u02db\u0001\u0000"+ - "\u0000\u0000\u02db\u02dc\u0001\u0000\u0000\u0000\u02dc\u02dd\u0003I\u001d"+ - "\u0000\u02dd\u02e7\u0001\u0000\u0000\u0000\u02de\u02e0\u0003i-\u0000\u02df"+ - "\u02e1\u0003A\u0019\u0000\u02e0\u02df\u0001\u0000\u0000\u0000\u02e1\u02e2"+ - "\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e3"+ - "\u0001\u0000\u0000\u0000\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e5"+ - "\u0003I\u001d\u0000\u02e5\u02e7\u0001\u0000\u0000\u0000\u02e6\u02bd\u0001"+ - "\u0000\u0000\u0000\u02e6\u02c8\u0001\u0000\u0000\u0000\u02e6\u02cf\u0001"+ - "\u0000\u0000\u0000\u02e6\u02de\u0001\u0000\u0000\u0000\u02e7Z\u0001\u0000"+ - "\u0000\u0000\u02e8\u02e9\u0007\u001e\u0000\u0000\u02e9\u02ea\u0007\u001f"+ - "\u0000\u0000\u02ea\\\u0001\u0000\u0000\u0000\u02eb\u02ec\u0007\f\u0000"+ - "\u0000\u02ec\u02ed\u0007\t\u0000\u0000\u02ed\u02ee\u0007\u0000\u0000\u0000"+ - "\u02ee^\u0001\u0000\u0000\u0000\u02ef\u02f0\u0007\f\u0000\u0000\u02f0"+ - "\u02f1\u0007\u0002\u0000\u0000\u02f1\u02f2\u0007\u0004\u0000\u0000\u02f2"+ - "`\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4b\u0001"+ - 
"\u0000\u0000\u0000\u02f5\u02f6\u0005:\u0000\u0000\u02f6\u02f7\u0005:\u0000"+ - "\u0000\u02f7d\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005,\u0000\u0000\u02f9"+ - "f\u0001\u0000\u0000\u0000\u02fa\u02fb\u0007\u0000\u0000\u0000\u02fb\u02fc"+ - "\u0007\u0003\u0000\u0000\u02fc\u02fd\u0007\u0002\u0000\u0000\u02fd\u02fe"+ - "\u0007\u0004\u0000\u0000\u02feh\u0001\u0000\u0000\u0000\u02ff\u0300\u0005"+ - ".\u0000\u0000\u0300j\u0001\u0000\u0000\u0000\u0301\u0302\u0007\u000f\u0000"+ - "\u0000\u0302\u0303\u0007\f\u0000\u0000\u0303\u0304\u0007\r\u0000\u0000"+ - "\u0304\u0305\u0007\u0002\u0000\u0000\u0305\u0306\u0007\u0003\u0000\u0000"+ - "\u0306l\u0001\u0000\u0000\u0000\u0307\u0308\u0007\u000f\u0000\u0000\u0308"+ - "\u0309\u0007\u0001\u0000\u0000\u0309\u030a\u0007\u0006\u0000\u0000\u030a"+ - "\u030b\u0007\u0002\u0000\u0000\u030b\u030c\u0007\u0005\u0000\u0000\u030c"+ - "n\u0001\u0000\u0000\u0000\u030d\u030e\u0007\u0001\u0000\u0000\u030e\u030f"+ - "\u0007\t\u0000\u0000\u030fp\u0001\u0000\u0000\u0000\u0310\u0311\u0007"+ - "\u0001\u0000\u0000\u0311\u0312\u0007\u0002\u0000\u0000\u0312r\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0007\r\u0000\u0000\u0314\u0315\u0007\f\u0000"+ - "\u0000\u0315\u0316\u0007\u0002\u0000\u0000\u0316\u0317\u0007\u0005\u0000"+ - "\u0000\u0317t\u0001\u0000\u0000\u0000\u0318\u0319\u0007\r\u0000\u0000"+ - "\u0319\u031a\u0007\u0001\u0000\u0000\u031a\u031b\u0007\u0012\u0000\u0000"+ - "\u031b\u031c\u0007\u0003\u0000\u0000\u031cv\u0001\u0000\u0000\u0000\u031d"+ - "\u031e\u0005(\u0000\u0000\u031ex\u0001\u0000\u0000\u0000\u031f\u0320\u0007"+ - "\t\u0000\u0000\u0320\u0321\u0007\u0007\u0000\u0000\u0321\u0322\u0007\u0005"+ - "\u0000\u0000\u0322z\u0001\u0000\u0000\u0000\u0323\u0324\u0007\t\u0000"+ - "\u0000\u0324\u0325\u0007\u0014\u0000\u0000\u0325\u0326\u0007\r\u0000\u0000"+ - "\u0326\u0327\u0007\r\u0000\u0000\u0327|\u0001\u0000\u0000\u0000\u0328"+ - "\u0329\u0007\t\u0000\u0000\u0329\u032a\u0007\u0014\u0000\u0000\u032a\u032b"+ - "\u0007\r\u0000\u0000\u032b\u032c\u0007\r\u0000\u0000\u032c\u032d\u0007"+ - "\u0002\u0000\u0000\u032d~\u0001\u0000\u0000\u0000\u032e\u032f\u0007\u0007"+ - "\u0000\u0000\u032f\u0330\u0007\u0006\u0000\u0000\u0330\u0080\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0005?\u0000\u0000\u0332\u0082\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0007\u0006\u0000\u0000\u0334\u0335\u0007\r\u0000\u0000"+ - "\u0335\u0336\u0007\u0001\u0000\u0000\u0336\u0337\u0007\u0012\u0000\u0000"+ - "\u0337\u0338\u0007\u0003\u0000\u0000\u0338\u0084\u0001\u0000\u0000\u0000"+ - "\u0339\u033a\u0005)\u0000\u0000\u033a\u0086\u0001\u0000\u0000\u0000\u033b"+ - "\u033c\u0007\u0005\u0000\u0000\u033c\u033d\u0007\u0006\u0000\u0000\u033d"+ - "\u033e\u0007\u0014\u0000\u0000\u033e\u033f\u0007\u0003\u0000\u0000\u033f"+ - "\u0088\u0001\u0000\u0000\u0000\u0340\u0341\u0005=\u0000\u0000\u0341\u0342"+ - "\u0005=\u0000\u0000\u0342\u008a\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ - "=\u0000\u0000\u0344\u0345\u0005~\u0000\u0000\u0345\u008c\u0001\u0000\u0000"+ - "\u0000\u0346\u0347\u0005!\u0000\u0000\u0347\u0348\u0005=\u0000\u0000\u0348"+ - "\u008e\u0001\u0000\u0000\u0000\u0349\u034a\u0005<\u0000\u0000\u034a\u0090"+ - "\u0001\u0000\u0000\u0000\u034b\u034c\u0005<\u0000\u0000\u034c\u034d\u0005"+ - "=\u0000\u0000\u034d\u0092\u0001\u0000\u0000\u0000\u034e\u034f\u0005>\u0000"+ - "\u0000\u034f\u0094\u0001\u0000\u0000\u0000\u0350\u0351\u0005>\u0000\u0000"+ - "\u0351\u0352\u0005=\u0000\u0000\u0352\u0096\u0001\u0000\u0000\u0000\u0353"+ - "\u0354\u0005+\u0000\u0000\u0354\u0098\u0001\u0000\u0000\u0000\u0355\u0356"+ - 
"\u0005-\u0000\u0000\u0356\u009a\u0001\u0000\u0000\u0000\u0357\u0358\u0005"+ - "*\u0000\u0000\u0358\u009c\u0001\u0000\u0000\u0000\u0359\u035a\u0005/\u0000"+ - "\u0000\u035a\u009e\u0001\u0000\u0000\u0000\u035b\u035c\u0005%\u0000\u0000"+ - "\u035c\u00a0\u0001\u0000\u0000\u0000\u035d\u035e\u0004I\u0003\u0000\u035e"+ - "\u035f\u0003=\u0017\u0000\u035f\u0360\u0001\u0000\u0000\u0000\u0360\u0361"+ - "\u0006I\f\u0000\u0361\u00a2\u0001\u0000\u0000\u0000\u0362\u0363\u0003"+ - "-\u000f\u0000\u0363\u0364\u0001\u0000\u0000\u0000\u0364\u0365\u0006J\r"+ - "\u0000\u0365\u00a4\u0001\u0000\u0000\u0000\u0366\u0369\u0003\u00819\u0000"+ - "\u0367\u036a\u0003C\u001a\u0000\u0368\u036a\u0003Q!\u0000\u0369\u0367"+ - "\u0001\u0000\u0000\u0000\u0369\u0368\u0001\u0000\u0000\u0000\u036a\u036e"+ - "\u0001\u0000\u0000\u0000\u036b\u036d\u0003S\"\u0000\u036c\u036b\u0001"+ - "\u0000\u0000\u0000\u036d\u0370\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ - "\u0000\u0000\u0000\u036e\u036f\u0001\u0000\u0000\u0000\u036f\u0378\u0001"+ - "\u0000\u0000\u0000\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u0373\u0003"+ - "\u00819\u0000\u0372\u0374\u0003A\u0019\u0000\u0373\u0372\u0001\u0000\u0000"+ - "\u0000\u0374\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000"+ - "\u0000\u0375\u0376\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000"+ - "\u0000\u0377\u0366\u0001\u0000\u0000\u0000\u0377\u0371\u0001\u0000\u0000"+ - "\u0000\u0378\u00a6\u0001\u0000\u0000\u0000\u0379\u037a\u0005[\u0000\u0000"+ - "\u037a\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006L\u0000\u0000\u037c"+ - "\u037d\u0006L\u0000\u0000\u037d\u00a8\u0001\u0000\u0000\u0000\u037e\u037f"+ - "\u0005]\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380\u0381\u0006"+ - "M\u000b\u0000\u0381\u0382\u0006M\u000b\u0000\u0382\u00aa\u0001\u0000\u0000"+ - "\u0000\u0383\u0387\u0003C\u001a\u0000\u0384\u0386\u0003S\"\u0000\u0385"+ - "\u0384\u0001\u0000\u0000\u0000\u0386\u0389\u0001\u0000\u0000\u0000\u0387"+ - "\u0385\u0001\u0000\u0000\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388"+ - "\u0394\u0001\u0000\u0000\u0000\u0389\u0387\u0001\u0000\u0000\u0000\u038a"+ - "\u038d\u0003Q!\u0000\u038b\u038d\u0003K\u001e\u0000\u038c\u038a\u0001"+ - "\u0000\u0000\u0000\u038c\u038b\u0001\u0000\u0000\u0000\u038d\u038f\u0001"+ - "\u0000\u0000\u0000\u038e\u0390\u0003S\"\u0000\u038f\u038e\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u038f\u0001\u0000"+ - "\u0000\u0000\u0391\u0392\u0001\u0000\u0000\u0000\u0392\u0394\u0001\u0000"+ - "\u0000\u0000\u0393\u0383\u0001\u0000\u0000\u0000\u0393\u038c\u0001\u0000"+ - "\u0000\u0000\u0394\u00ac\u0001\u0000\u0000\u0000\u0395\u0397\u0003M\u001f"+ - "\u0000\u0396\u0398\u0003O \u0000\u0397\u0396\u0001\u0000\u0000\u0000\u0398"+ - "\u0399\u0001\u0000\u0000\u0000\u0399\u0397\u0001\u0000\u0000\u0000\u0399"+ - "\u039a\u0001\u0000\u0000\u0000\u039a\u039b\u0001\u0000\u0000\u0000\u039b"+ - "\u039c\u0003M\u001f\u0000\u039c\u00ae\u0001\u0000\u0000\u0000\u039d\u039e"+ - "\u0003\u00adO\u0000\u039e\u00b0\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ - "7\u0014\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006Q\n"+ - "\u0000\u03a2\u00b2\u0001\u0000\u0000\u0000\u03a3\u03a4\u00039\u0015\u0000"+ - "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006R\n\u0000\u03a6"+ - "\u00b4\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003;\u0016\u0000\u03a8\u03a9"+ - "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006S\n\u0000\u03aa\u00b6\u0001"+ - "\u0000\u0000\u0000\u03ab\u03ac\u0003\u00a7L\u0000\u03ac\u03ad\u0001\u0000"+ - 
"\u0000\u0000\u03ad\u03ae\u0006T\u000e\u0000\u03ae\u03af\u0006T\u000f\u0000"+ - "\u03af\u00b8\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003?\u0018\u0000\u03b1"+ - "\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006U\u0010\u0000\u03b3\u03b4"+ - "\u0006U\u000b\u0000\u03b4\u00ba\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003"+ - ";\u0016\u0000\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006V\n"+ - "\u0000\u03b8\u00bc\u0001\u0000\u0000\u0000\u03b9\u03ba\u00037\u0014\u0000"+ - "\u03ba\u03bb\u0001\u0000\u0000\u0000\u03bb\u03bc\u0006W\n\u0000\u03bc"+ - "\u00be\u0001\u0000\u0000\u0000\u03bd\u03be\u00039\u0015\u0000\u03be\u03bf"+ - "\u0001\u0000\u0000\u0000\u03bf\u03c0\u0006X\n\u0000\u03c0\u00c0\u0001"+ - "\u0000\u0000\u0000\u03c1\u03c2\u0003?\u0018\u0000\u03c2\u03c3\u0001\u0000"+ - "\u0000\u0000\u03c3\u03c4\u0006Y\u0010\u0000\u03c4\u03c5\u0006Y\u000b\u0000"+ - "\u03c5\u00c2\u0001\u0000\u0000\u0000\u03c6\u03c7\u0003\u00a7L\u0000\u03c7"+ - "\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9\u0006Z\u000e\u0000\u03c9\u00c4"+ - "\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003\u00a9M\u0000\u03cb\u03cc\u0001"+ - "\u0000\u0000\u0000\u03cc\u03cd\u0006[\u0011\u0000\u03cd\u00c6\u0001\u0000"+ - "\u0000\u0000\u03ce\u03cf\u0003=\u0017\u0000\u03cf\u03d0\u0001\u0000\u0000"+ - "\u0000\u03d0\u03d1\u0006\\\f\u0000\u03d1\u00c8\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d3\u0003e+\u0000\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03d5"+ - "\u0006]\u0012\u0000\u03d5\u00ca\u0001\u0000\u0000\u0000\u03d6\u03d7\u0003"+ - "a)\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000\u03d8\u03d9\u0006^\u0013"+ - "\u0000\u03d9\u00cc\u0001\u0000\u0000\u0000\u03da\u03db\u0007\u0010\u0000"+ - "\u0000\u03db\u03dc\u0007\u0003\u0000\u0000\u03dc\u03dd\u0007\u0005\u0000"+ - "\u0000\u03dd\u03de\u0007\f\u0000\u0000\u03de\u03df\u0007\u0000\u0000\u0000"+ - "\u03df\u03e0\u0007\f\u0000\u0000\u03e0\u03e1\u0007\u0005\u0000\u0000\u03e1"+ - "\u03e2\u0007\f\u0000\u0000\u03e2\u00ce\u0001\u0000\u0000\u0000\u03e3\u03e7"+ - "\b \u0000\u0000\u03e4\u03e5\u0005/\u0000\u0000\u03e5\u03e7\b!\u0000\u0000"+ - "\u03e6\u03e3\u0001\u0000\u0000\u0000\u03e6\u03e4\u0001\u0000\u0000\u0000"+ - "\u03e7\u00d0\u0001\u0000\u0000\u0000\u03e8\u03ea\u0003\u00cf`\u0000\u03e9"+ - "\u03e8\u0001\u0000\u0000\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb"+ - "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec"+ - "\u00d2\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00d1a\u0000\u03ee\u03ef"+ - "\u0001\u0000\u0000\u0000\u03ef\u03f0\u0006b\u0014\u0000\u03f0\u00d4\u0001"+ - "\u0000\u0000\u0000\u03f1\u03f2\u0003U#\u0000\u03f2\u03f3\u0001\u0000\u0000"+ - "\u0000\u03f3\u03f4\u0006c\u0015\u0000\u03f4\u00d6\u0001\u0000\u0000\u0000"+ - "\u03f5\u03f6\u00037\u0014\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ - "\u03f8\u0006d\n\u0000\u03f8\u00d8\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u00039\u0015\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ - "e\n\u0000\u03fc\u00da\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003;\u0016"+ - "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006f\n\u0000"+ - "\u0400\u00dc\u0001\u0000\u0000\u0000\u0401\u0402\u0003?\u0018\u0000\u0402"+ - "\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006g\u0010\u0000\u0404\u0405"+ - "\u0006g\u000b\u0000\u0405\u00de\u0001\u0000\u0000\u0000\u0406\u0407\u0003"+ - "i-\u0000\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006h\u0016"+ - "\u0000\u0409\u00e0\u0001\u0000\u0000\u0000\u040a\u040b\u0003e+\u0000\u040b"+ - "\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006i\u0012\u0000\u040d\u00e2"+ - 
"\u0001\u0000\u0000\u0000\u040e\u040f\u0004j\u0004\u0000\u040f\u0410\u0003"+ - "\u00819\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006j\u0017"+ - "\u0000\u0412\u00e4\u0001\u0000\u0000\u0000\u0413\u0414\u0004k\u0005\u0000"+ - "\u0414\u0415\u0003\u00a5K\u0000\u0415\u0416\u0001\u0000\u0000\u0000\u0416"+ - "\u0417\u0006k\u0018\u0000\u0417\u00e6\u0001\u0000\u0000\u0000\u0418\u041d"+ - "\u0003C\u001a\u0000\u0419\u041d\u0003A\u0019\u0000\u041a\u041d\u0003Q"+ - "!\u0000\u041b\u041d\u0003\u009bF\u0000\u041c\u0418\u0001\u0000\u0000\u0000"+ - "\u041c\u0419\u0001\u0000\u0000\u0000\u041c\u041a\u0001\u0000\u0000\u0000"+ - "\u041c\u041b\u0001\u0000\u0000\u0000\u041d\u00e8\u0001\u0000\u0000\u0000"+ - "\u041e\u0421\u0003C\u001a\u0000\u041f\u0421\u0003\u009bF\u0000\u0420\u041e"+ - "\u0001\u0000\u0000\u0000\u0420\u041f\u0001\u0000\u0000\u0000\u0421\u0425"+ - "\u0001\u0000\u0000\u0000\u0422\u0424\u0003\u00e7l\u0000\u0423\u0422\u0001"+ - "\u0000\u0000\u0000\u0424\u0427\u0001\u0000\u0000\u0000\u0425\u0423\u0001"+ - "\u0000\u0000\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0432\u0001"+ - "\u0000\u0000\u0000\u0427\u0425\u0001\u0000\u0000\u0000\u0428\u042b\u0003"+ - "Q!\u0000\u0429\u042b\u0003K\u001e\u0000\u042a\u0428\u0001\u0000\u0000"+ - "\u0000\u042a\u0429\u0001\u0000\u0000\u0000\u042b\u042d\u0001\u0000\u0000"+ - "\u0000\u042c\u042e\u0003\u00e7l\u0000\u042d\u042c\u0001\u0000\u0000\u0000"+ - "\u042e\u042f\u0001\u0000\u0000\u0000\u042f\u042d\u0001\u0000\u0000\u0000"+ - "\u042f\u0430\u0001\u0000\u0000\u0000\u0430\u0432\u0001\u0000\u0000\u0000"+ - "\u0431\u0420\u0001\u0000\u0000\u0000\u0431\u042a\u0001\u0000\u0000\u0000"+ - "\u0432\u00ea\u0001\u0000\u0000\u0000\u0433\u0436\u0003\u00e9m\u0000\u0434"+ - "\u0436\u0003\u00adO\u0000\u0435\u0433\u0001\u0000\u0000\u0000\u0435\u0434"+ - "\u0001\u0000\u0000\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0435"+ - "\u0001\u0000\u0000\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u00ec"+ - "\u0001\u0000\u0000\u0000\u0439\u043a\u00037\u0014\u0000\u043a\u043b\u0001"+ - "\u0000\u0000\u0000\u043b\u043c\u0006o\n\u0000\u043c\u00ee\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00039\u0015\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006p\n\u0000\u0440\u00f0\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u0003;\u0016\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006q\n\u0000\u0444\u00f2\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u0003?\u0018\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "r\u0010\u0000\u0448\u0449\u0006r\u000b\u0000\u0449\u00f4\u0001\u0000\u0000"+ - "\u0000\u044a\u044b\u0003a)\u0000\u044b\u044c\u0001\u0000\u0000\u0000\u044c"+ - "\u044d\u0006s\u0013\u0000\u044d\u00f6\u0001\u0000\u0000\u0000\u044e\u044f"+ - "\u0003e+\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450\u0451\u0006t"+ - "\u0012\u0000\u0451\u00f8\u0001\u0000\u0000\u0000\u0452\u0453\u0003i-\u0000"+ - "\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455\u0006u\u0016\u0000\u0455"+ - "\u00fa\u0001\u0000\u0000\u0000\u0456\u0457\u0004v\u0006\u0000\u0457\u0458"+ - "\u0003\u00819\u0000\u0458\u0459\u0001\u0000\u0000\u0000\u0459\u045a\u0006"+ - "v\u0017\u0000\u045a\u00fc\u0001\u0000\u0000\u0000\u045b\u045c\u0004w\u0007"+ - "\u0000\u045c\u045d\u0003\u00a5K\u0000\u045d\u045e\u0001\u0000\u0000\u0000"+ - "\u045e\u045f\u0006w\u0018\u0000\u045f\u00fe\u0001\u0000\u0000\u0000\u0460"+ - "\u0461\u0007\f\u0000\u0000\u0461\u0462\u0007\u0002\u0000\u0000\u0462\u0100"+ - "\u0001\u0000\u0000\u0000\u0463\u0464\u0003\u00ebn\u0000\u0464\u0465\u0001"+ - 
"\u0000\u0000\u0000\u0465\u0466\u0006y\u0019\u0000\u0466\u0102\u0001\u0000"+ - "\u0000\u0000\u0467\u0468\u00037\u0014\u0000\u0468\u0469\u0001\u0000\u0000"+ - "\u0000\u0469\u046a\u0006z\n\u0000\u046a\u0104\u0001\u0000\u0000\u0000"+ - "\u046b\u046c\u00039\u0015\u0000\u046c\u046d\u0001\u0000\u0000\u0000\u046d"+ - "\u046e\u0006{\n\u0000\u046e\u0106\u0001\u0000\u0000\u0000\u046f\u0470"+ - "\u0003;\u0016\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006"+ - "|\n\u0000\u0472\u0108\u0001\u0000\u0000\u0000\u0473\u0474\u0003?\u0018"+ - "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0476\u0006}\u0010\u0000"+ - "\u0476\u0477\u0006}\u000b\u0000\u0477\u010a\u0001\u0000\u0000\u0000\u0478"+ - "\u0479\u0003\u00a7L\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b"+ - "\u0006~\u000e\u0000\u047b\u047c\u0006~\u001a\u0000\u047c\u010c\u0001\u0000"+ - "\u0000\u0000\u047d\u047e\u0007\u0007\u0000\u0000\u047e\u047f\u0007\t\u0000"+ - "\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006\u007f\u001b"+ - "\u0000\u0481\u010e\u0001\u0000\u0000\u0000\u0482\u0483\u0007\u0013\u0000"+ - "\u0000\u0483\u0484\u0007\u0001\u0000\u0000\u0484\u0485\u0007\u0005\u0000"+ - "\u0000\u0485\u0486\u0007\n\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000"+ - "\u0487\u0488\u0006\u0080\u001b\u0000\u0488\u0110\u0001\u0000\u0000\u0000"+ - "\u0489\u048a\b\"\u0000\u0000\u048a\u0112\u0001\u0000\u0000\u0000\u048b"+ - "\u048d\u0003\u0111\u0081\u0000\u048c\u048b\u0001\u0000\u0000\u0000\u048d"+ - "\u048e\u0001\u0000\u0000\u0000\u048e\u048c\u0001\u0000\u0000\u0000\u048e"+ - "\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0001\u0000\u0000\u0000\u0490"+ - "\u0491\u0003=\u0017\u0000\u0491\u0493\u0001\u0000\u0000\u0000\u0492\u048c"+ - "\u0001\u0000\u0000\u0000\u0492\u0493\u0001\u0000\u0000\u0000\u0493\u0495"+ - "\u0001\u0000\u0000\u0000\u0494\u0496\u0003\u0111\u0081\u0000\u0495\u0494"+ - "\u0001\u0000\u0000\u0000\u0496\u0497\u0001\u0000\u0000\u0000\u0497\u0495"+ - "\u0001\u0000\u0000\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0114"+ - "\u0001\u0000\u0000\u0000\u0499\u049a\u0003\u0113\u0082\u0000\u049a\u049b"+ - "\u0001\u0000\u0000\u0000\u049b\u049c\u0006\u0083\u001c\u0000\u049c\u0116"+ - "\u0001\u0000\u0000\u0000\u049d\u049e\u00037\u0014\u0000\u049e\u049f\u0001"+ - "\u0000\u0000\u0000\u049f\u04a0\u0006\u0084\n\u0000\u04a0\u0118\u0001\u0000"+ - "\u0000\u0000\u04a1\u04a2\u00039\u0015\u0000\u04a2\u04a3\u0001\u0000\u0000"+ - "\u0000\u04a3\u04a4\u0006\u0085\n\u0000\u04a4\u011a\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0003;\u0016\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0006\u0086\n\u0000\u04a8\u011c\u0001\u0000\u0000\u0000\u04a9\u04aa"+ - "\u0003?\u0018\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac\u0006"+ - "\u0087\u0010\u0000\u04ac\u04ad\u0006\u0087\u000b\u0000\u04ad\u04ae\u0006"+ - "\u0087\u000b\u0000\u04ae\u011e\u0001\u0000\u0000\u0000\u04af\u04b0\u0003"+ - "a)\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006\u0088\u0013"+ - "\u0000\u04b2\u0120\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003e+\u0000\u04b4"+ - "\u04b5\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u0089\u0012\u0000\u04b6"+ - "\u0122\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003i-\u0000\u04b8\u04b9\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0006\u008a\u0016\u0000\u04ba\u0124\u0001"+ - "\u0000\u0000\u0000\u04bb\u04bc\u0003\u010f\u0080\u0000\u04bc\u04bd\u0001"+ - "\u0000\u0000\u0000\u04bd\u04be\u0006\u008b\u001d\u0000\u04be\u0126\u0001"+ - "\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ebn\u0000\u04c0\u04c1\u0001\u0000"+ - 
"\u0000\u0000\u04c1\u04c2\u0006\u008c\u0019\u0000\u04c2\u0128\u0001\u0000"+ - "\u0000\u0000\u04c3\u04c4\u0003\u00afP\u0000\u04c4\u04c5\u0001\u0000\u0000"+ - "\u0000\u04c5\u04c6\u0006\u008d\u001e\u0000\u04c6\u012a\u0001\u0000\u0000"+ - "\u0000\u04c7\u04c8\u0004\u008e\b\u0000\u04c8\u04c9\u0003\u00819\u0000"+ - "\u04c9\u04ca\u0001\u0000\u0000\u0000\u04ca\u04cb\u0006\u008e\u0017\u0000"+ - "\u04cb\u012c\u0001\u0000\u0000\u0000\u04cc\u04cd\u0004\u008f\t\u0000\u04cd"+ - "\u04ce\u0003\u00a5K\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf\u04d0"+ - "\u0006\u008f\u0018\u0000\u04d0\u012e\u0001\u0000\u0000\u0000\u04d1\u04d2"+ - "\u00037\u0014\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ - "\u0090\n\u0000\u04d4\u0130\u0001\u0000\u0000\u0000\u04d5\u04d6\u00039"+ - "\u0015\u0000\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0091"+ - "\n\u0000\u04d8\u0132\u0001\u0000\u0000\u0000\u04d9\u04da\u0003;\u0016"+ - "\u0000\u04da\u04db\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0092\n\u0000"+ - "\u04dc\u0134\u0001\u0000\u0000\u0000\u04dd\u04de\u0003?\u0018\u0000\u04de"+ - "\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u0010\u0000\u04e0"+ - "\u04e1\u0006\u0093\u000b\u0000\u04e1\u0136\u0001\u0000\u0000\u0000\u04e2"+ - "\u04e3\u0003i-\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006"+ - "\u0094\u0016\u0000\u04e5\u0138\u0001\u0000\u0000\u0000\u04e6\u04e7\u0004"+ - "\u0095\n\u0000\u04e7\u04e8\u0003\u00819\u0000\u04e8\u04e9\u0001\u0000"+ - "\u0000\u0000\u04e9\u04ea\u0006\u0095\u0017\u0000\u04ea\u013a\u0001\u0000"+ - "\u0000\u0000\u04eb\u04ec\u0004\u0096\u000b\u0000\u04ec\u04ed\u0003\u00a5"+ - "K\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006\u0096\u0018"+ - "\u0000\u04ef\u013c\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003\u00afP\u0000"+ - "\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u0097\u001e\u0000"+ - "\u04f3\u013e\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003\u00abN\u0000\u04f5"+ - "\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u0098\u001f\u0000\u04f7"+ - "\u0140\u0001\u0000\u0000\u0000\u04f8\u04f9\u00037\u0014\u0000\u04f9\u04fa"+ - "\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u0099\n\u0000\u04fb\u0142\u0001"+ - "\u0000\u0000\u0000\u04fc\u04fd\u00039\u0015\u0000\u04fd\u04fe\u0001\u0000"+ - "\u0000\u0000\u04fe\u04ff\u0006\u009a\n\u0000\u04ff\u0144\u0001\u0000\u0000"+ - "\u0000\u0500\u0501\u0003;\u0016\u0000\u0501\u0502\u0001\u0000\u0000\u0000"+ - "\u0502\u0503\u0006\u009b\n\u0000\u0503\u0146\u0001\u0000\u0000\u0000\u0504"+ - "\u0505\u0003?\u0018\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507"+ - "\u0006\u009c\u0010\u0000\u0507\u0508\u0006\u009c\u000b\u0000\u0508\u0148"+ - "\u0001\u0000\u0000\u0000\u0509\u050a\u0007\u0001\u0000\u0000\u050a\u050b"+ - "\u0007\t\u0000\u0000\u050b\u050c\u0007\u000f\u0000\u0000\u050c\u050d\u0007"+ - "\u0007\u0000\u0000\u050d\u014a\u0001\u0000\u0000\u0000\u050e\u050f\u0003"+ - "7\u0014\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511\u0006\u009e"+ - "\n\u0000\u0511\u014c\u0001\u0000\u0000\u0000\u0512\u0513\u00039\u0015"+ - "\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u009f\n\u0000"+ - "\u0515\u014e\u0001\u0000\u0000\u0000\u0516\u0517\u0003;\u0016\u0000\u0517"+ - "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00a0\n\u0000\u0519\u0150"+ - "\u0001\u0000\u0000\u0000\u051a\u051b\u0003\u00a9M\u0000\u051b\u051c\u0001"+ - "\u0000\u0000\u0000\u051c\u051d\u0006\u00a1\u0011\u0000\u051d\u051e\u0006"+ - "\u00a1\u000b\u0000\u051e\u0152\u0001\u0000\u0000\u0000\u051f\u0520\u0003"+ - 
"=\u0017\u0000\u0520\u0521\u0001\u0000\u0000\u0000\u0521\u0522\u0006\u00a2"+ - "\f\u0000\u0522\u0154\u0001\u0000\u0000\u0000\u0523\u0529\u0003K\u001e"+ - "\u0000\u0524\u0529\u0003A\u0019\u0000\u0525\u0529\u0003i-\u0000\u0526"+ - "\u0529\u0003C\u001a\u0000\u0527\u0529\u0003Q!\u0000\u0528\u0523\u0001"+ - "\u0000\u0000\u0000\u0528\u0524\u0001\u0000\u0000\u0000\u0528\u0525\u0001"+ - "\u0000\u0000\u0000\u0528\u0526\u0001\u0000\u0000\u0000\u0528\u0527\u0001"+ - "\u0000\u0000\u0000\u0529\u052a\u0001\u0000\u0000\u0000\u052a\u0528\u0001"+ - "\u0000\u0000\u0000\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u0156\u0001"+ - "\u0000\u0000\u0000\u052c\u052d\u00037\u0014\u0000\u052d\u052e\u0001\u0000"+ - "\u0000\u0000\u052e\u052f\u0006\u00a4\n\u0000\u052f\u0158\u0001\u0000\u0000"+ - "\u0000\u0530\u0531\u00039\u0015\u0000\u0531\u0532\u0001\u0000\u0000\u0000"+ - "\u0532\u0533\u0006\u00a5\n\u0000\u0533\u015a\u0001\u0000\u0000\u0000\u0534"+ - "\u0535\u0003;\u0016\u0000\u0535\u0536\u0001\u0000\u0000\u0000\u0536\u0537"+ - "\u0006\u00a6\n\u0000\u0537\u015c\u0001\u0000\u0000\u0000\u0538\u0539\u0003"+ - "?\u0018\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u00a7"+ - "\u0010\u0000\u053b\u053c\u0006\u00a7\u000b\u0000\u053c\u015e\u0001\u0000"+ - "\u0000\u0000\u053d\u053e\u0003=\u0017\u0000\u053e\u053f\u0001\u0000\u0000"+ - "\u0000\u053f\u0540\u0006\u00a8\f\u0000\u0540\u0160\u0001\u0000\u0000\u0000"+ - "\u0541\u0542\u0003e+\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543\u0544"+ - "\u0006\u00a9\u0012\u0000\u0544\u0162\u0001\u0000\u0000\u0000\u0545\u0546"+ - "\u0003i-\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548\u0006\u00aa"+ - "\u0016\u0000\u0548\u0164\u0001\u0000\u0000\u0000\u0549\u054a\u0003\u010d"+ - "\u007f\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006\u00ab"+ - " \u0000\u054c\u054d\u0006\u00ab!\u0000\u054d\u0166\u0001\u0000\u0000\u0000"+ - "\u054e\u054f\u0003\u00d1a\u0000\u054f\u0550\u0001\u0000\u0000\u0000\u0550"+ - "\u0551\u0006\u00ac\u0014\u0000\u0551\u0168\u0001\u0000\u0000\u0000\u0552"+ - "\u0553\u0003U#\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555\u0006"+ - "\u00ad\u0015\u0000\u0555\u016a\u0001\u0000\u0000\u0000\u0556\u0557\u0003"+ - "7\u0014\u0000\u0557\u0558\u0001\u0000\u0000\u0000\u0558\u0559\u0006\u00ae"+ - "\n\u0000\u0559\u016c\u0001\u0000\u0000\u0000\u055a\u055b\u00039\u0015"+ - "\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006\u00af\n\u0000"+ - "\u055d\u016e\u0001\u0000\u0000\u0000\u055e\u055f\u0003;\u0016\u0000\u055f"+ - "\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00b0\n\u0000\u0561\u0170"+ - "\u0001\u0000\u0000\u0000\u0562\u0563\u0003?\u0018\u0000\u0563\u0564\u0001"+ - "\u0000\u0000\u0000\u0564\u0565\u0006\u00b1\u0010\u0000\u0565\u0566\u0006"+ - "\u00b1\u000b\u0000\u0566\u0567\u0006\u00b1\u000b\u0000\u0567\u0172\u0001"+ - "\u0000\u0000\u0000\u0568\u0569\u0003e+\u0000\u0569\u056a\u0001\u0000\u0000"+ - "\u0000\u056a\u056b\u0006\u00b2\u0012\u0000\u056b\u0174\u0001\u0000\u0000"+ - "\u0000\u056c\u056d\u0003i-\u0000\u056d\u056e\u0001\u0000\u0000\u0000\u056e"+ - "\u056f\u0006\u00b3\u0016\u0000\u056f\u0176\u0001\u0000\u0000\u0000\u0570"+ - "\u0571\u0003\u00ebn\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572\u0573"+ - "\u0006\u00b4\u0019\u0000\u0573\u0178\u0001\u0000\u0000\u0000\u0574\u0575"+ - "\u00037\u0014\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ - "\u00b5\n\u0000\u0577\u017a\u0001\u0000\u0000\u0000\u0578\u0579\u00039"+ - "\u0015\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b6"+ - 
"\n\u0000\u057b\u017c\u0001\u0000\u0000\u0000\u057c\u057d\u0003;\u0016"+ - "\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00b7\n\u0000"+ - "\u057f\u017e\u0001\u0000\u0000\u0000\u0580\u0581\u0003?\u0018\u0000\u0581"+ - "\u0582\u0001\u0000\u0000\u0000\u0582\u0583\u0006\u00b8\u0010\u0000\u0583"+ - "\u0584\u0006\u00b8\u000b\u0000\u0584\u0180\u0001\u0000\u0000\u0000\u0585"+ - "\u0586\u0003\u00d1a\u0000\u0586\u0587\u0001\u0000\u0000\u0000\u0587\u0588"+ - "\u0006\u00b9\u0014\u0000\u0588\u0589\u0006\u00b9\u000b\u0000\u0589\u058a"+ - "\u0006\u00b9\"\u0000\u058a\u0182\u0001\u0000\u0000\u0000\u058b\u058c\u0003"+ - "U#\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d\u058e\u0006\u00ba\u0015"+ - "\u0000\u058e\u058f\u0006\u00ba\u000b\u0000\u058f\u0590\u0006\u00ba\"\u0000"+ - "\u0590\u0184\u0001\u0000\u0000\u0000\u0591\u0592\u00037\u0014\u0000\u0592"+ - "\u0593\u0001\u0000\u0000\u0000\u0593\u0594\u0006\u00bb\n\u0000\u0594\u0186"+ - "\u0001\u0000\u0000\u0000\u0595\u0596\u00039\u0015\u0000\u0596\u0597\u0001"+ - "\u0000\u0000\u0000\u0597\u0598\u0006\u00bc\n\u0000\u0598\u0188\u0001\u0000"+ - "\u0000\u0000\u0599\u059a\u0003;\u0016\u0000\u059a\u059b\u0001\u0000\u0000"+ - "\u0000\u059b\u059c\u0006\u00bd\n\u0000\u059c\u018a\u0001\u0000\u0000\u0000"+ - "\u059d\u059e\u0003=\u0017\u0000\u059e\u059f\u0001\u0000\u0000\u0000\u059f"+ - "\u05a0\u0006\u00be\f\u0000\u05a0\u05a1\u0006\u00be\u000b\u0000\u05a1\u05a2"+ - "\u0006\u00be\t\u0000\u05a2\u018c\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003"+ - "e+\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00bf\u0012"+ - "\u0000\u05a6\u05a7\u0006\u00bf\u000b\u0000\u05a7\u05a8\u0006\u00bf\t\u0000"+ - "\u05a8\u018e\u0001\u0000\u0000\u0000\u05a9\u05aa\u00037\u0014\u0000\u05aa"+ - "\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac\u0006\u00c0\n\u0000\u05ac\u0190"+ - "\u0001\u0000\u0000\u0000\u05ad\u05ae\u00039\u0015\u0000\u05ae\u05af\u0001"+ - "\u0000\u0000\u0000\u05af\u05b0\u0006\u00c1\n\u0000\u05b0\u0192\u0001\u0000"+ - "\u0000\u0000\u05b1\u05b2\u0003;\u0016\u0000\u05b2\u05b3\u0001\u0000\u0000"+ - "\u0000\u05b3\u05b4\u0006\u00c2\n\u0000\u05b4\u0194\u0001\u0000\u0000\u0000"+ - "\u05b5\u05b6\u0003\u00afP\u0000\u05b6\u05b7\u0001\u0000\u0000\u0000\u05b7"+ - "\u05b8\u0006\u00c3\u000b\u0000\u05b8\u05b9\u0006\u00c3\u0000\u0000\u05b9"+ - "\u05ba\u0006\u00c3\u001e\u0000\u05ba\u0196\u0001\u0000\u0000\u0000\u05bb"+ - "\u05bc\u0003\u00abN\u0000\u05bc\u05bd\u0001\u0000\u0000\u0000\u05bd\u05be"+ - "\u0006\u00c4\u000b\u0000\u05be\u05bf\u0006\u00c4\u0000\u0000\u05bf\u05c0"+ - "\u0006\u00c4\u001f\u0000\u05c0\u0198\u0001\u0000\u0000\u0000\u05c1\u05c2"+ - "\u0003[&\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3\u05c4\u0006\u00c5"+ - "\u000b\u0000\u05c4\u05c5\u0006\u00c5\u0000\u0000\u05c5\u05c6\u0006\u00c5"+ - "#\u0000\u05c6\u019a\u0001\u0000\u0000\u0000\u05c7\u05c8\u0003?\u0018\u0000"+ - "\u05c8\u05c9\u0001\u0000\u0000\u0000\u05c9\u05ca\u0006\u00c6\u0010\u0000"+ - "\u05ca\u05cb\u0006\u00c6\u000b\u0000\u05cb\u019c\u0001\u0000\u0000\u0000"+ - "A\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ - "\u0245\u024f\u0253\u0256\u025f\u0261\u026c\u0281\u0286\u028f\u0296\u029b"+ - "\u029d\u02a8\u02b0\u02b3\u02b5\u02ba\u02bf\u02c5\u02cc\u02d1\u02d7\u02da"+ - "\u02e2\u02e6\u0369\u036e\u0375\u0377\u0387\u038c\u0391\u0393\u0399\u03e6"+ - "\u03eb\u041c\u0420\u0425\u042a\u042f\u0431\u0435\u0437\u048e\u0492\u0497"+ - "\u0528\u052a$\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005"+ - "\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t"+ - 
"\u0000\u0005\u000b\u0000\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000"+ - "\u0007\u0018\u0000\u0007\u0010\u0000\u0007A\u0000\u0005\u0000\u0000\u0007"+ - "\u0019\u0000\u0007B\u0000\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007"+ - "\u001a\u0000\u0007$\u0000\u00070\u0000\u0007@\u0000\u0007P\u0000\u0005"+ - "\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000\u0007D\u0000\u0007"+ - "C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000\u0007\u001d\u0000"; + "\u00a7\u0004\u00a7\u056e\b\u00a7\u000b\u00a7\f\u00a7\u056f\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6"+ + "\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7"+ + "\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0002\u02ab\u02f0\u0000\u00d5\u0010"+ + "\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c"+ + "\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u0012"+ + "4\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001c"+ + 
"H\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000"+ + "\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ + "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ + ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac"+ + "\u0000\u00aeE\u00b0F\u00b2G\u00b4H\u00b6\u0000\u00b8I\u00baJ\u00bcK\u00be"+ + "L\u00c0\u0000\u00c2\u0000\u00c4M\u00c6N\u00c8O\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6P\u00d8\u0000\u00da"+ + "Q\u00dc\u0000\u00de\u0000\u00e0R\u00e2S\u00e4T\u00e6\u0000\u00e8\u0000"+ + "\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4U\u00f6"+ + "V\u00f8W\u00faX\u00fc\u0000\u00fe\u0000\u0100\u0000\u0102\u0000\u0104"+ + "\u0000\u0106\u0000\u0108Y\u010a\u0000\u010cZ\u010e[\u0110\\\u0112\u0000"+ + "\u0114\u0000\u0116]\u0118^\u011a\u0000\u011c_\u011e\u0000\u0120`\u0122"+ + "a\u0124b\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ + "\u0000\u0132\u0000\u0134\u0000\u0136\u0000\u0138c\u013ad\u013ce\u013e"+ + "\u0000\u0140\u0000\u0142\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a"+ + "f\u014cg\u014eh\u0150\u0000\u0152i\u0154j\u0156k\u0158l\u015a\u0000\u015c"+ + "\u0000\u015em\u0160n\u0162o\u0164p\u0166\u0000\u0168\u0000\u016a\u0000"+ + "\u016c\u0000\u016e\u0000\u0170\u0000\u0172\u0000\u0174q\u0176r\u0178s"+ + "\u017a\u0000\u017c\u0000\u017e\u0000\u0180\u0000\u0182t\u0184u\u0186v"+ + "\u0188\u0000\u018a\u0000\u018c\u0000\u018e\u0000\u0190w\u0192\u0000\u0194"+ + "\u0000\u0196x\u0198y\u019az\u019c\u0000\u019e\u0000\u01a0\u0000\u01a2"+ + "{\u01a4|\u01a6}\u01a8\u0000\u01aa\u0000\u01ac~\u01ae\u007f\u01b0\u0080"+ + "\u01b2\u0000\u01b4\u0000\u01b6\u0000\u01b8\u0000\u0010\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000"+ + "DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ + "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ + "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r"+ + "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ + "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ + "//::<<>?\\\\||\u065c\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"+ + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"+ + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"+ + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0001H"+ + 
"\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000"+ + "\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000"+ + "\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j"+ + "\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000"+ + "\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000"+ + "\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x"+ + "\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000"+ + "\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000"+ + "\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000"+ + "\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000"+ + "\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000"+ + "\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000"+ + "\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000"+ + "\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000"+ + "\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000"+ + "\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000"+ + "\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000"+ + "\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000"+ + "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ + "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ + "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ + "\u0000\u0001\u00b8\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000"+ + "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ + "\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000"+ + "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ + "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000"+ + "\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ + "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ + "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ + "\u0000\u0003\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000"+ + "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ + "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ + "\u0000\u0004\u00e6\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000"+ + "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ + "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f4\u0001\u0000\u0000"+ + "\u0000\u0004\u00f6\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ + "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000"+ + "\u0000\u0005\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ + "\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ + "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ + "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ + "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ + "\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000"+ + "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ + "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + 
"\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000"+ + "\u0000\u0007\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ + "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ + "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ + "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ + "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ + "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\b\u013e\u0001\u0000\u0000\u0000"+ + "\b\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144"+ + "\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001"+ + "\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000"+ + "\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\t\u0150\u0001\u0000\u0000"+ + "\u0000\t\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000"+ + "\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\n\u015a"+ + "\u0001\u0000\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001"+ + "\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000"+ + "\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\u000b\u0166\u0001\u0000\u0000"+ + "\u0000\u000b\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ + "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ + "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ + "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ + "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\f\u017a\u0001\u0000\u0000\u0000"+ + "\f\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180"+ + "\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001"+ + "\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\r\u0188\u0001\u0000"+ + "\u0000\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000"+ + "\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000"+ + "\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196"+ + "\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001"+ + "\u0000\u0000\u0000\u000e\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001"+ + "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ + "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ + "\u0000\u0000\u0000\u000f\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001"+ + "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ + "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ + "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ + "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u0010\u01ba\u0001"+ + "\u0000\u0000\u0000\u0012\u01c4\u0001\u0000\u0000\u0000\u0014\u01cb\u0001"+ + "\u0000\u0000\u0000\u0016\u01d4\u0001\u0000\u0000\u0000\u0018\u01db\u0001"+ + "\u0000\u0000\u0000\u001a\u01e5\u0001\u0000\u0000\u0000\u001c\u01ec\u0001"+ + "\u0000\u0000\u0000\u001e\u01f3\u0001\u0000\u0000\u0000 \u01fa\u0001\u0000"+ + "\u0000\u0000\"\u0202\u0001\u0000\u0000\u0000$\u020e\u0001\u0000\u0000"+ + "\u0000&\u0217\u0001\u0000\u0000\u0000(\u021d\u0001\u0000\u0000\u0000*"+ + "\u0224\u0001\u0000\u0000\u0000,\u022b\u0001\u0000\u0000\u0000.\u0233\u0001"+ + "\u0000\u0000\u00000\u023b\u0001\u0000\u0000\u00002\u024a\u0001\u0000\u0000"+ + 
"\u00004\u0256\u0001\u0000\u0000\u00006\u0261\u0001\u0000\u0000\u00008"+ + "\u0269\u0001\u0000\u0000\u0000:\u0271\u0001\u0000\u0000\u0000<\u0279\u0001"+ + "\u0000\u0000\u0000>\u0282\u0001\u0000\u0000\u0000@\u028d\u0001\u0000\u0000"+ + "\u0000B\u0293\u0001\u0000\u0000\u0000D\u02a4\u0001\u0000\u0000\u0000F"+ + "\u02b4\u0001\u0000\u0000\u0000H\u02ba\u0001\u0000\u0000\u0000J\u02be\u0001"+ + "\u0000\u0000\u0000L\u02c0\u0001\u0000\u0000\u0000N\u02c2\u0001\u0000\u0000"+ + "\u0000P\u02c5\u0001\u0000\u0000\u0000R\u02c7\u0001\u0000\u0000\u0000T"+ + "\u02d0\u0001\u0000\u0000\u0000V\u02d2\u0001\u0000\u0000\u0000X\u02d7\u0001"+ + "\u0000\u0000\u0000Z\u02d9\u0001\u0000\u0000\u0000\\\u02de\u0001\u0000"+ + "\u0000\u0000^\u02fd\u0001\u0000\u0000\u0000`\u0300\u0001\u0000\u0000\u0000"+ + "b\u032e\u0001\u0000\u0000\u0000d\u0330\u0001\u0000\u0000\u0000f\u0333"+ + "\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000j\u033b\u0001\u0000"+ + "\u0000\u0000l\u033d\u0001\u0000\u0000\u0000n\u0340\u0001\u0000\u0000\u0000"+ + "p\u0342\u0001\u0000\u0000\u0000r\u0344\u0001\u0000\u0000\u0000t\u0349"+ + "\u0001\u0000\u0000\u0000v\u034b\u0001\u0000\u0000\u0000x\u0351\u0001\u0000"+ + "\u0000\u0000z\u0357\u0001\u0000\u0000\u0000|\u035a\u0001\u0000\u0000\u0000"+ + "~\u035d\u0001\u0000\u0000\u0000\u0080\u0362\u0001\u0000\u0000\u0000\u0082"+ + "\u0367\u0001\u0000\u0000\u0000\u0084\u0369\u0001\u0000\u0000\u0000\u0086"+ + "\u036d\u0001\u0000\u0000\u0000\u0088\u0372\u0001\u0000\u0000\u0000\u008a"+ + "\u0378\u0001\u0000\u0000\u0000\u008c\u037b\u0001\u0000\u0000\u0000\u008e"+ + "\u037d\u0001\u0000\u0000\u0000\u0090\u0383\u0001\u0000\u0000\u0000\u0092"+ + "\u0385\u0001\u0000\u0000\u0000\u0094\u038a\u0001\u0000\u0000\u0000\u0096"+ + "\u038d\u0001\u0000\u0000\u0000\u0098\u0390\u0001\u0000\u0000\u0000\u009a"+ + "\u0393\u0001\u0000\u0000\u0000\u009c\u0395\u0001\u0000\u0000\u0000\u009e"+ + "\u0398\u0001\u0000\u0000\u0000\u00a0\u039a\u0001\u0000\u0000\u0000\u00a2"+ + "\u039d\u0001\u0000\u0000\u0000\u00a4\u039f\u0001\u0000\u0000\u0000\u00a6"+ + "\u03a1\u0001\u0000\u0000\u0000\u00a8\u03a3\u0001\u0000\u0000\u0000\u00aa"+ + "\u03a5\u0001\u0000\u0000\u0000\u00ac\u03a7\u0001\u0000\u0000\u0000\u00ae"+ + "\u03bc\u0001\u0000\u0000\u0000\u00b0\u03be\u0001\u0000\u0000\u0000\u00b2"+ + "\u03c3\u0001\u0000\u0000\u0000\u00b4\u03d8\u0001\u0000\u0000\u0000\u00b6"+ + "\u03da\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ + "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03e8\u0001\u0000\u0000\u0000\u00be"+ + "\u03ec\u0001\u0000\u0000\u0000\u00c0\u03f0\u0001\u0000\u0000\u0000\u00c2"+ + "\u03f5\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ + "\u03fe\u0001\u0000\u0000\u0000\u00c8\u0402\u0001\u0000\u0000\u0000\u00ca"+ + "\u0406\u0001\u0000\u0000\u0000\u00cc\u040b\u0001\u0000\u0000\u0000\u00ce"+ + "\u040f\u0001\u0000\u0000\u0000\u00d0\u0413\u0001\u0000\u0000\u0000\u00d2"+ + "\u0417\u0001\u0000\u0000\u0000\u00d4\u041b\u0001\u0000\u0000\u0000\u00d6"+ + "\u041f\u0001\u0000\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da"+ + "\u042e\u0001\u0000\u0000\u0000\u00dc\u0432\u0001\u0000\u0000\u0000\u00de"+ + "\u0436\u0001\u0000\u0000\u0000\u00e0\u043a\u0001\u0000\u0000\u0000\u00e2"+ + "\u043e\u0001\u0000\u0000\u0000\u00e4\u0442\u0001\u0000\u0000\u0000\u00e6"+ + "\u0446\u0001\u0000\u0000\u0000\u00e8\u044b\u0001\u0000\u0000\u0000\u00ea"+ + "\u044f\u0001\u0000\u0000\u0000\u00ec\u0453\u0001\u0000\u0000\u0000\u00ee"+ + "\u0458\u0001\u0000\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2"+ + 
"\u0476\u0001\u0000\u0000\u0000\u00f4\u047a\u0001\u0000\u0000\u0000\u00f6"+ + "\u047e\u0001\u0000\u0000\u0000\u00f8\u0482\u0001\u0000\u0000\u0000\u00fa"+ + "\u0486\u0001\u0000\u0000\u0000\u00fc\u048a\u0001\u0000\u0000\u0000\u00fe"+ + "\u048f\u0001\u0000\u0000\u0000\u0100\u0493\u0001\u0000\u0000\u0000\u0102"+ + "\u0497\u0001\u0000\u0000\u0000\u0104\u049b\u0001\u0000\u0000\u0000\u0106"+ + "\u04a0\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ + "\u04a8\u0001\u0000\u0000\u0000\u010c\u04ac\u0001\u0000\u0000\u0000\u010e"+ + "\u04b0\u0001\u0000\u0000\u0000\u0110\u04b4\u0001\u0000\u0000\u0000\u0112"+ + "\u04b8\u0001\u0000\u0000\u0000\u0114\u04bd\u0001\u0000\u0000\u0000\u0116"+ + "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ + "\u04ce\u0001\u0000\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e"+ + "\u04de\u0001\u0000\u0000\u0000\u0120\u04e2\u0001\u0000\u0000\u0000\u0122"+ + "\u04e6\u0001\u0000\u0000\u0000\u0124\u04ea\u0001\u0000\u0000\u0000\u0126"+ + "\u04ee\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ + "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fc\u0001\u0000\u0000\u0000\u012e"+ + "\u0500\u0001\u0000\u0000\u0000\u0130\u0504\u0001\u0000\u0000\u0000\u0132"+ + "\u0508\u0001\u0000\u0000\u0000\u0134\u050c\u0001\u0000\u0000\u0000\u0136"+ + "\u0511\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ + "\u051a\u0001\u0000\u0000\u0000\u013c\u051e\u0001\u0000\u0000\u0000\u013e"+ + "\u0522\u0001\u0000\u0000\u0000\u0140\u0527\u0001\u0000\u0000\u0000\u0142"+ + "\u052b\u0001\u0000\u0000\u0000\u0144\u0530\u0001\u0000\u0000\u0000\u0146"+ + "\u0535\u0001\u0000\u0000\u0000\u0148\u0539\u0001\u0000\u0000\u0000\u014a"+ + "\u053d\u0001\u0000\u0000\u0000\u014c\u0541\u0001\u0000\u0000\u0000\u014e"+ + "\u0545\u0001\u0000\u0000\u0000\u0150\u0549\u0001\u0000\u0000\u0000\u0152"+ + "\u054e\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ + "\u0557\u0001\u0000\u0000\u0000\u0158\u055b\u0001\u0000\u0000\u0000\u015a"+ + "\u055f\u0001\u0000\u0000\u0000\u015c\u0564\u0001\u0000\u0000\u0000\u015e"+ + "\u056d\u0001\u0000\u0000\u0000\u0160\u0571\u0001\u0000\u0000\u0000\u0162"+ + "\u0575\u0001\u0000\u0000\u0000\u0164\u0579\u0001\u0000\u0000\u0000\u0166"+ + "\u057d\u0001\u0000\u0000\u0000\u0168\u0582\u0001\u0000\u0000\u0000\u016a"+ + "\u0586\u0001\u0000\u0000\u0000\u016c\u058a\u0001\u0000\u0000\u0000\u016e"+ + "\u058e\u0001\u0000\u0000\u0000\u0170\u0593\u0001\u0000\u0000\u0000\u0172"+ + "\u0597\u0001\u0000\u0000\u0000\u0174\u059b\u0001\u0000\u0000\u0000\u0176"+ + "\u059f\u0001\u0000\u0000\u0000\u0178\u05a3\u0001\u0000\u0000\u0000\u017a"+ + "\u05a7\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ + "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b5\u0001\u0000\u0000\u0000\u0182"+ + "\u05b9\u0001\u0000\u0000\u0000\u0184\u05bd\u0001\u0000\u0000\u0000\u0186"+ + "\u05c1\u0001\u0000\u0000\u0000\u0188\u05c5\u0001\u0000\u0000\u0000\u018a"+ + "\u05ca\u0001\u0000\u0000\u0000\u018c\u05ce\u0001\u0000\u0000\u0000\u018e"+ + "\u05d2\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ + "\u05e1\u0001\u0000\u0000\u0000\u0194\u05e5\u0001\u0000\u0000\u0000\u0196"+ + "\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001\u0000\u0000\u0000\u019a"+ + "\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001\u0000\u0000\u0000\u019e"+ + "\u05fa\u0001\u0000\u0000\u0000\u01a0\u0600\u0001\u0000\u0000\u0000\u01a2"+ + "\u0606\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ + 
"\u060e\u0001\u0000\u0000\u0000\u01a8\u0612\u0001\u0000\u0000\u0000\u01aa"+ + "\u0618\u0001\u0000\u0000\u0000\u01ac\u061e\u0001\u0000\u0000\u0000\u01ae"+ + "\u0622\u0001\u0000\u0000\u0000\u01b0\u0626\u0001\u0000\u0000\u0000\u01b2"+ + "\u062a\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ + "\u0636\u0001\u0000\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba"+ + "\u01bb\u0007\u0000\u0000\u0000\u01bb\u01bc\u0007\u0001\u0000\u0000\u01bc"+ + "\u01bd\u0007\u0002\u0000\u0000\u01bd\u01be\u0007\u0002\u0000\u0000\u01be"+ + "\u01bf\u0007\u0003\u0000\u0000\u01bf\u01c0\u0007\u0004\u0000\u0000\u01c0"+ + "\u01c1\u0007\u0005\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c3\u0006\u0000\u0000\u0000\u01c3\u0011\u0001\u0000\u0000\u0000\u01c4"+ + "\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0006\u0000\u0000\u01c6"+ + "\u01c7\u0007\u0007\u0000\u0000\u01c7\u01c8\u0007\b\u0000\u0000\u01c8\u01c9"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006\u0001\u0001\u0000\u01ca\u0013"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007\u0003\u0000\u0000\u01cc\u01cd"+ + "\u0007\t\u0000\u0000\u01cd\u01ce\u0007\u0006\u0000\u0000\u01ce\u01cf\u0007"+ + "\u0001\u0000\u0000\u01cf\u01d0\u0007\u0004\u0000\u0000\u01d0\u01d1\u0007"+ + "\n\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0002"+ + "\u0002\u0000\u01d3\u0015\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0003"+ + "\u0000\u0000\u01d5\u01d6\u0007\u000b\u0000\u0000\u01d6\u01d7\u0007\f\u0000"+ + "\u0000\u01d7\u01d8\u0007\r\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ + "\u01d9\u01da\u0006\u0003\u0000\u0000\u01da\u0017\u0001\u0000\u0000\u0000"+ + "\u01db\u01dc\u0007\u0003\u0000\u0000\u01dc\u01dd\u0007\u000e\u0000\u0000"+ + "\u01dd\u01de\u0007\b\u0000\u0000\u01de\u01df\u0007\r\u0000\u0000\u01df"+ + "\u01e0\u0007\f\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2"+ + "\u0007\t\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ + "\u0004\u0003\u0000\u01e4\u0019\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007"+ + "\u000f\u0000\u0000\u01e6\u01e7\u0007\u0006\u0000\u0000\u01e7\u01e8\u0007"+ + "\u0007\u0000\u0000\u01e8\u01e9\u0007\u0010\u0000\u0000\u01e9\u01ea\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0006\u0005\u0004\u0000\u01eb\u001b\u0001"+ + "\u0000\u0000\u0000\u01ec\u01ed\u0007\u0011\u0000\u0000\u01ed\u01ee\u0007"+ + "\u0006\u0000\u0000\u01ee\u01ef\u0007\u0007\u0000\u0000\u01ef\u01f0\u0007"+ + "\u0012\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006"+ + "\u0006\u0000\u0000\u01f2\u001d\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007"+ + "\u0012\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000\u01f5\u01f6\u0007"+ + "\u0003\u0000\u0000\u01f6\u01f7\u0007\b\u0000\u0000\u01f7\u01f8\u0001\u0000"+ + "\u0000\u0000\u01f8\u01f9\u0006\u0007\u0001\u0000\u01f9\u001f\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0007\r\u0000\u0000\u01fb\u01fc\u0007\u0001\u0000"+ + "\u0000\u01fc\u01fd\u0007\u0010\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000"+ + "\u0000\u01fe\u01ff\u0007\u0005\u0000\u0000\u01ff\u0200\u0001\u0000\u0000"+ + "\u0000\u0200\u0201\u0006\b\u0000\u0000\u0201!\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0007\u0010\u0000\u0000\u0203\u0204\u0007\u000b\u0000\u0000"+ + "\u0204\u0205\u0005_\u0000\u0000\u0205\u0206\u0007\u0003\u0000\u0000\u0206"+ + "\u0207\u0007\u000e\u0000\u0000\u0207\u0208\u0007\b\u0000\u0000\u0208\u0209"+ + "\u0007\f\u0000\u0000\u0209\u020a\u0007\t\u0000\u0000\u020a\u020b\u0007"+ + "\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0006"+ + 
"\t\u0005\u0000\u020d#\u0001\u0000\u0000\u0000\u020e\u020f\u0007\u0006"+ + "\u0000\u0000\u020f\u0210\u0007\u0003\u0000\u0000\u0210\u0211\u0007\t\u0000"+ + "\u0000\u0211\u0212\u0007\f\u0000\u0000\u0212\u0213\u0007\u0010\u0000\u0000"+ + "\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000"+ + "\u0215\u0216\u0006\n\u0006\u0000\u0216%\u0001\u0000\u0000\u0000\u0217"+ + "\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0007\u0000\u0000\u0219"+ + "\u021a\u0007\u0013\u0000\u0000\u021a\u021b\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0006\u000b\u0000\u0000\u021c\'\u0001\u0000\u0000\u0000\u021d\u021e"+ + "\u0007\u0002\u0000\u0000\u021e\u021f\u0007\n\u0000\u0000\u021f\u0220\u0007"+ + "\u0007\u0000\u0000\u0220\u0221\u0007\u0013\u0000\u0000\u0221\u0222\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0006\f\u0007\u0000\u0223)\u0001\u0000"+ + "\u0000\u0000\u0224\u0225\u0007\u0002\u0000\u0000\u0225\u0226\u0007\u0007"+ + "\u0000\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227\u0228\u0007\u0005"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006\r\u0000"+ + "\u0000\u022a+\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0002\u0000\u0000"+ + "\u022c\u022d\u0007\u0005\u0000\u0000\u022d\u022e\u0007\f\u0000\u0000\u022e"+ + "\u022f\u0007\u0005\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000\u0230"+ + "\u0231\u0001\u0000\u0000\u0000\u0231\u0232\u0006\u000e\u0000\u0000\u0232"+ + "-\u0001\u0000\u0000\u0000\u0233\u0234\u0007\u0013\u0000\u0000\u0234\u0235"+ + "\u0007\n\u0000\u0000\u0235\u0236\u0007\u0003\u0000\u0000\u0236\u0237\u0007"+ + "\u0006\u0000\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0006\u000f\u0000\u0000\u023a/\u0001\u0000"+ + "\u0000\u0000\u023b\u023c\u0004\u0010\u0000\u0000\u023c\u023d\u0007\u0001"+ + "\u0000\u0000\u023d\u023e\u0007\t\u0000\u0000\u023e\u023f\u0007\r\u0000"+ + "\u0000\u023f\u0240\u0007\u0001\u0000\u0000\u0240\u0241\u0007\t\u0000\u0000"+ + "\u0241\u0242\u0007\u0003\u0000\u0000\u0242\u0243\u0007\u0002\u0000\u0000"+ + "\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245\u0007\f\u0000\u0000\u0245"+ + "\u0246\u0007\u0005\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000\u0247"+ + "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\u0010\u0000\u0000\u0249"+ + "1\u0001\u0000\u0000\u0000\u024a\u024b\u0004\u0011\u0001\u0000\u024b\u024c"+ + "\u0007\r\u0000\u0000\u024c\u024d\u0007\u0007\u0000\u0000\u024d\u024e\u0007"+ + "\u0007\u0000\u0000\u024e\u024f\u0007\u0012\u0000\u0000\u024f\u0250\u0007"+ + "\u0014\u0000\u0000\u0250\u0251\u0007\b\u0000\u0000\u0251\u0252\u0005_"+ + "\u0000\u0000\u0252\u0253\u0005\u8001\uf414\u0000\u0000\u0253\u0254\u0001"+ + "\u0000\u0000\u0000\u0254\u0255\u0006\u0011\b\u0000\u02553\u0001\u0000"+ + "\u0000\u0000\u0256\u0257\u0004\u0012\u0002\u0000\u0257\u0258\u0007\u0010"+ + "\u0000\u0000\u0258\u0259\u0007\u0003\u0000\u0000\u0259\u025a\u0007\u0005"+ + "\u0000\u0000\u025a\u025b\u0007\u0006\u0000\u0000\u025b\u025c\u0007\u0001"+ + "\u0000\u0000\u025c\u025d\u0007\u0004\u0000\u0000\u025d\u025e\u0007\u0002"+ + "\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0012"+ + "\t\u0000\u02605\u0001\u0000\u0000\u0000\u0261\u0262\u0004\u0013\u0003"+ + "\u0000\u0262\u0263\u0007\u0015\u0000\u0000\u0263\u0264\u0007\u0007\u0000"+ + "\u0000\u0264\u0265\u0007\u0001\u0000\u0000\u0265\u0266\u0007\t\u0000\u0000"+ + "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0013\n\u0000\u0268"+ + "7\u0001\u0000\u0000\u0000\u0269\u026a\u0004\u0014\u0004\u0000\u026a\u026b"+ + 
"\u0007\u000f\u0000\u0000\u026b\u026c\u0007\u0014\u0000\u0000\u026c\u026d"+ + "\u0007\r\u0000\u0000\u026d\u026e\u0007\r\u0000\u0000\u026e\u026f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0006\u0014\n\u0000\u02709\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0004\u0015\u0005\u0000\u0272\u0273\u0007\r\u0000"+ + "\u0000\u0273\u0274\u0007\u0003\u0000\u0000\u0274\u0275\u0007\u000f\u0000"+ + "\u0000\u0275\u0276\u0007\u0005\u0000\u0000\u0276\u0277\u0001\u0000\u0000"+ + "\u0000\u0277\u0278\u0006\u0015\n\u0000\u0278;\u0001\u0000\u0000\u0000"+ + "\u0279\u027a\u0004\u0016\u0006\u0000\u027a\u027b\u0007\u0006\u0000\u0000"+ + "\u027b\u027c\u0007\u0001\u0000\u0000\u027c\u027d\u0007\u0011\u0000\u0000"+ + "\u027d\u027e\u0007\n\u0000\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f"+ + "\u0280\u0001\u0000\u0000\u0000\u0280\u0281\u0006\u0016\n\u0000\u0281="+ + "\u0001\u0000\u0000\u0000\u0282\u0283\u0004\u0017\u0007\u0000\u0283\u0284"+ + "\u0007\r\u0000\u0000\u0284\u0285\u0007\u0007\u0000\u0000\u0285\u0286\u0007"+ + "\u0007\u0000\u0000\u0286\u0287\u0007\u0012\u0000\u0000\u0287\u0288\u0007"+ + "\u0014\u0000\u0000\u0288\u0289\u0007\b\u0000\u0000\u0289\u028a\u0001\u0000"+ + "\u0000\u0000\u028a\u028b\u0006\u0017\n\u0000\u028b?\u0001\u0000\u0000"+ + "\u0000\u028c\u028e\b\u0016\u0000\u0000\u028d\u028c\u0001\u0000\u0000\u0000"+ + "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000"+ + "\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ + "\u0291\u0292\u0006\u0018\u0000\u0000\u0292A\u0001\u0000\u0000\u0000\u0293"+ + "\u0294\u0005/\u0000\u0000\u0294\u0295\u0005/\u0000\u0000\u0295\u0299\u0001"+ + "\u0000\u0000\u0000\u0296\u0298\b\u0017\u0000\u0000\u0297\u0296\u0001\u0000"+ + "\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000"+ + "\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029d\u0001\u0000"+ + "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u029e\u0005\r\u0000"+ + "\u0000\u029d\u029c\u0001\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000"+ + "\u0000\u029e\u02a0\u0001\u0000\u0000\u0000\u029f\u02a1\u0005\n\u0000\u0000"+ + "\u02a0\u029f\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000"+ + "\u02a1\u02a2\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006\u0019\u000b\u0000"+ + "\u02a3C\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005/\u0000\u0000\u02a5\u02a6"+ + "\u0005*\u0000\u0000\u02a6\u02ab\u0001\u0000\u0000\u0000\u02a7\u02aa\u0003"+ + "D\u001a\u0000\u02a8\u02aa\t\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000"+ + "\u0000\u0000\u02a9\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ad\u0001\u0000"+ + "\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000"+ + "\u0000\u0000\u02ac\u02ae\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ + "\u0000\u0000\u02ae\u02af\u0005*\u0000\u0000\u02af\u02b0\u0005/\u0000\u0000"+ + "\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b2\u0006\u001a\u000b\u0000"+ + "\u02b2E\u0001\u0000\u0000\u0000\u02b3\u02b5\u0007\u0018\u0000\u0000\u02b4"+ + "\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6"+ + "\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7"+ + "\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b9\u0006\u001b\u000b\u0000\u02b9"+ + "G\u0001\u0000\u0000\u0000\u02ba\u02bb\u0005|\u0000\u0000\u02bb\u02bc\u0001"+ + "\u0000\u0000\u0000\u02bc\u02bd\u0006\u001c\f\u0000\u02bdI\u0001\u0000"+ + "\u0000\u0000\u02be\u02bf\u0007\u0019\u0000\u0000\u02bfK\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0007\u001a\u0000\u0000\u02c1M\u0001\u0000\u0000\u0000"+ + 
"\u02c2\u02c3\u0005\\\u0000\u0000\u02c3\u02c4\u0007\u001b\u0000\u0000\u02c4"+ + "O\u0001\u0000\u0000\u0000\u02c5\u02c6\b\u001c\u0000\u0000\u02c6Q\u0001"+ + "\u0000\u0000\u0000\u02c7\u02c9\u0007\u0003\u0000\u0000\u02c8\u02ca\u0007"+ + "\u001d\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0003"+ + "J\u001d\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000"+ + "\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000"+ + "\u0000\u0000\u02cfS\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005@\u0000\u0000"+ + "\u02d1U\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005`\u0000\u0000\u02d3W"+ + "\u0001\u0000\u0000\u0000\u02d4\u02d8\b\u001e\u0000\u0000\u02d5\u02d6\u0005"+ + "`\u0000\u0000\u02d6\u02d8\u0005`\u0000\u0000\u02d7\u02d4\u0001\u0000\u0000"+ + "\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d8Y\u0001\u0000\u0000\u0000"+ + "\u02d9\u02da\u0005_\u0000\u0000\u02da[\u0001\u0000\u0000\u0000\u02db\u02df"+ + "\u0003L\u001e\u0000\u02dc\u02df\u0003J\u001d\u0000\u02dd\u02df\u0003Z"+ + "%\u0000\u02de\u02db\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000"+ + "\u0000\u02de\u02dd\u0001\u0000\u0000\u0000\u02df]\u0001\u0000\u0000\u0000"+ + "\u02e0\u02e5\u0005\"\u0000\u0000\u02e1\u02e4\u0003N\u001f\u0000\u02e2"+ + "\u02e4\u0003P \u0000\u02e3\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e2\u0001"+ + "\u0000\u0000\u0000\u02e4\u02e7\u0001\u0000\u0000\u0000\u02e5\u02e3\u0001"+ + "\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000\u0000\u02e6\u02e8\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e8\u02fe\u0005"+ + "\"\u0000\u0000\u02e9\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000"+ + "\u0000\u02eb\u02ec\u0005\"\u0000\u0000\u02ec\u02f0\u0001\u0000\u0000\u0000"+ + "\u02ed\u02ef\b\u0017\u0000\u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef"+ + "\u02f2\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f0"+ + "\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f3\u0001\u0000\u0000\u0000\u02f2"+ + "\u02f0\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5"+ + "\u0005\"\u0000\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02f8\u0001"+ + "\u0000\u0000\u0000\u02f7\u02f9\u0005\"\u0000\u0000\u02f8\u02f7\u0001\u0000"+ + "\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fa\u02fc\u0005\"\u0000\u0000\u02fb\u02fa\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001\u0000\u0000"+ + "\u0000\u02fd\u02e0\u0001\u0000\u0000\u0000\u02fd\u02e9\u0001\u0000\u0000"+ + "\u0000\u02fe_\u0001\u0000\u0000\u0000\u02ff\u0301\u0003J\u001d\u0000\u0300"+ + "\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ + "\u0300\u0001\u0000\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303"+ + "a\u0001\u0000\u0000\u0000\u0304\u0306\u0003J\u001d\u0000\u0305\u0304\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+ + "\u0000\u0000\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u0309\u0001"+ + "\u0000\u0000\u0000\u0309\u030d\u0003t2\u0000\u030a\u030c\u0003J\u001d"+ + "\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000"+ + "\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000"+ + "\u0000\u030e\u032f\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000"+ + "\u0000\u0310\u0312\u0003t2\u0000\u0311\u0313\u0003J\u001d\u0000\u0312"+ + "\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314"+ + 
"\u0312\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000\u0315"+ + "\u032f\u0001\u0000\u0000\u0000\u0316\u0318\u0003J\u001d\u0000\u0317\u0316"+ + "\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319\u0317"+ + "\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u0322"+ + "\u0001\u0000\u0000\u0000\u031b\u031f\u0003t2\u0000\u031c\u031e\u0003J"+ + "\u001d\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u0321\u0001\u0000"+ + "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ + "\u0000\u0000\u0320\u0323\u0001\u0000\u0000\u0000\u0321\u031f\u0001\u0000"+ + "\u0000\u0000\u0322\u031b\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000"+ + "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0325\u0003R!\u0000"+ + "\u0325\u032f\u0001\u0000\u0000\u0000\u0326\u0328\u0003t2\u0000\u0327\u0329"+ + "\u0003J\u001d\u0000\u0328\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001"+ + "\u0000\u0000\u0000\u032a\u0328\u0001\u0000\u0000\u0000\u032a\u032b\u0001"+ + "\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "R!\u0000\u032d\u032f\u0001\u0000\u0000\u0000\u032e\u0305\u0001\u0000\u0000"+ + "\u0000\u032e\u0310\u0001\u0000\u0000\u0000\u032e\u0317\u0001\u0000\u0000"+ + "\u0000\u032e\u0326\u0001\u0000\u0000\u0000\u032fc\u0001\u0000\u0000\u0000"+ + "\u0330\u0331\u0007\u001f\u0000\u0000\u0331\u0332\u0007 \u0000\u0000\u0332"+ + "e\u0001\u0000\u0000\u0000\u0333\u0334\u0007\f\u0000\u0000\u0334\u0335"+ + "\u0007\t\u0000\u0000\u0335\u0336\u0007\u0000\u0000\u0000\u0336g\u0001"+ + "\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\u0002"+ + "\u0000\u0000\u0339\u033a\u0007\u0004\u0000\u0000\u033ai\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005=\u0000\u0000\u033ck\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0005:\u0000\u0000\u033e\u033f\u0005:\u0000\u0000\u033fm\u0001"+ + "\u0000\u0000\u0000\u0340\u0341\u0005:\u0000\u0000\u0341o\u0001\u0000\u0000"+ + "\u0000\u0342\u0343\u0005,\u0000\u0000\u0343q\u0001\u0000\u0000\u0000\u0344"+ + "\u0345\u0007\u0000\u0000\u0000\u0345\u0346\u0007\u0003\u0000\u0000\u0346"+ + "\u0347\u0007\u0002\u0000\u0000\u0347\u0348\u0007\u0004\u0000\u0000\u0348"+ + "s\u0001\u0000\u0000\u0000\u0349\u034a\u0005.\u0000\u0000\u034au\u0001"+ + "\u0000\u0000\u0000\u034b\u034c\u0007\u000f\u0000\u0000\u034c\u034d\u0007"+ + "\f\u0000\u0000\u034d\u034e\u0007\r\u0000\u0000\u034e\u034f\u0007\u0002"+ + "\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350w\u0001\u0000\u0000"+ + "\u0000\u0351\u0352\u0007\u000f\u0000\u0000\u0352\u0353\u0007\u0001\u0000"+ + "\u0000\u0353\u0354\u0007\u0006\u0000\u0000\u0354\u0355\u0007\u0002\u0000"+ + "\u0000\u0355\u0356\u0007\u0005\u0000\u0000\u0356y\u0001\u0000\u0000\u0000"+ + "\u0357\u0358\u0007\u0001\u0000\u0000\u0358\u0359\u0007\t\u0000\u0000\u0359"+ + "{\u0001\u0000\u0000\u0000\u035a\u035b\u0007\u0001\u0000\u0000\u035b\u035c"+ + "\u0007\u0002\u0000\u0000\u035c}\u0001\u0000\u0000\u0000\u035d\u035e\u0007"+ + "\r\u0000\u0000\u035e\u035f\u0007\f\u0000\u0000\u035f\u0360\u0007\u0002"+ + "\u0000\u0000\u0360\u0361\u0007\u0005\u0000\u0000\u0361\u007f\u0001\u0000"+ + "\u0000\u0000\u0362\u0363\u0007\r\u0000\u0000\u0363\u0364\u0007\u0001\u0000"+ + "\u0000\u0364\u0365\u0007\u0012\u0000\u0000\u0365\u0366\u0007\u0003\u0000"+ + "\u0000\u0366\u0081\u0001\u0000\u0000\u0000\u0367\u0368\u0005(\u0000\u0000"+ + "\u0368\u0083\u0001\u0000\u0000\u0000\u0369\u036a\u0007\t\u0000\u0000\u036a"+ + "\u036b\u0007\u0007\u0000\u0000\u036b\u036c\u0007\u0005\u0000\u0000\u036c"+ + 
"\u0085\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e\u036f"+ + "\u0007\u0014\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0371\u0007"+ + "\r\u0000\u0000\u0371\u0087\u0001\u0000\u0000\u0000\u0372\u0373\u0007\t"+ + "\u0000\u0000\u0373\u0374\u0007\u0014\u0000\u0000\u0374\u0375\u0007\r\u0000"+ + "\u0000\u0375\u0376\u0007\r\u0000\u0000\u0376\u0377\u0007\u0002\u0000\u0000"+ + "\u0377\u0089\u0001\u0000\u0000\u0000\u0378\u0379\u0007\u0007\u0000\u0000"+ + "\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u008b\u0001\u0000\u0000\u0000"+ + "\u037b\u037c\u0005?\u0000\u0000\u037c\u008d\u0001\u0000\u0000\u0000\u037d"+ + "\u037e\u0007\u0006\u0000\u0000\u037e\u037f\u0007\r\u0000\u0000\u037f\u0380"+ + "\u0007\u0001\u0000\u0000\u0380\u0381\u0007\u0012\u0000\u0000\u0381\u0382"+ + "\u0007\u0003\u0000\u0000\u0382\u008f\u0001\u0000\u0000\u0000\u0383\u0384"+ + "\u0005)\u0000\u0000\u0384\u0091\u0001\u0000\u0000\u0000\u0385\u0386\u0007"+ + "\u0005\u0000\u0000\u0386\u0387\u0007\u0006\u0000\u0000\u0387\u0388\u0007"+ + "\u0014\u0000\u0000\u0388\u0389\u0007\u0003\u0000\u0000\u0389\u0093\u0001"+ + "\u0000\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u038c\u0005=\u0000"+ + "\u0000\u038c\u0095\u0001\u0000\u0000\u0000\u038d\u038e\u0005=\u0000\u0000"+ + "\u038e\u038f\u0005~\u0000\u0000\u038f\u0097\u0001\u0000\u0000\u0000\u0390"+ + "\u0391\u0005!\u0000\u0000\u0391\u0392\u0005=\u0000\u0000\u0392\u0099\u0001"+ + "\u0000\u0000\u0000\u0393\u0394\u0005<\u0000\u0000\u0394\u009b\u0001\u0000"+ + "\u0000\u0000\u0395\u0396\u0005<\u0000\u0000\u0396\u0397\u0005=\u0000\u0000"+ + "\u0397\u009d\u0001\u0000\u0000\u0000\u0398\u0399\u0005>\u0000\u0000\u0399"+ + "\u009f\u0001\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u039c"+ + "\u0005=\u0000\u0000\u039c\u00a1\u0001\u0000\u0000\u0000\u039d\u039e\u0005"+ + "+\u0000\u0000\u039e\u00a3\u0001\u0000\u0000\u0000\u039f\u03a0\u0005-\u0000"+ + "\u0000\u03a0\u00a5\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005*\u0000\u0000"+ + "\u03a2\u00a7\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005/\u0000\u0000\u03a4"+ + "\u00a9\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005%\u0000\u0000\u03a6\u00ab"+ + "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003.\u000f\u0000\u03a8\u03a9\u0001"+ + "\u0000\u0000\u0000\u03a9\u03aa\u0006N\r\u0000\u03aa\u00ad\u0001\u0000"+ + "\u0000\u0000\u03ab\u03ae\u0003\u008c>\u0000\u03ac\u03af\u0003L\u001e\u0000"+ + "\u03ad\u03af\u0003Z%\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03ad"+ + "\u0001\u0000\u0000\u0000\u03af\u03b3\u0001\u0000\u0000\u0000\u03b0\u03b2"+ + "\u0003\\&\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b5\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b1\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001"+ + "\u0000\u0000\u0000\u03b4\u03bd\u0001\u0000\u0000\u0000\u03b5\u03b3\u0001"+ + "\u0000\u0000\u0000\u03b6\u03b8\u0003\u008c>\u0000\u03b7\u03b9\u0003J\u001d"+ + "\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03ba\u0001\u0000\u0000"+ + "\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bd\u0001\u0000\u0000\u0000\u03bc\u03ab\u0001\u0000\u0000"+ + "\u0000\u03bc\u03b6\u0001\u0000\u0000\u0000\u03bd\u00af\u0001\u0000\u0000"+ + "\u0000\u03be\u03bf\u0005[\u0000\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0006P\u0000\u0000\u03c1\u03c2\u0006P\u0000\u0000\u03c2\u00b1"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0005]\u0000\u0000\u03c4\u03c5\u0001"+ + "\u0000\u0000\u0000\u03c5\u03c6\u0006Q\f\u0000\u03c6\u03c7\u0006Q\f\u0000"+ + "\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03cc\u0003L\u001e\u0000\u03c9"+ + 
"\u03cb\u0003\\&\u0000\u03ca\u03c9\u0001\u0000\u0000\u0000\u03cb\u03ce"+ + "\u0001\u0000\u0000\u0000\u03cc\u03ca\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + "\u0001\u0000\u0000\u0000\u03cd\u03d9\u0001\u0000\u0000\u0000\u03ce\u03cc"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d2\u0003Z%\u0000\u03d0\u03d2\u0003T"+ + "\"\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d1\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d2\u03d4\u0001\u0000\u0000\u0000\u03d3\u03d5\u0003\\&\u0000"+ + "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d7\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d1\u0001\u0000\u0000\u0000\u03d9\u00b5\u0001\u0000\u0000\u0000"+ + "\u03da\u03dc\u0003V#\u0000\u03db\u03dd\u0003X$\u0000\u03dc\u03db\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03dc\u0001"+ + "\u0000\u0000\u0000\u03de\u03df\u0001\u0000\u0000\u0000\u03df\u03e0\u0001"+ + "\u0000\u0000\u0000\u03e0\u03e1\u0003V#\u0000\u03e1\u00b7\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0003\u00b6S\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000"+ + "\u03e4\u03e5\u0003B\u0019\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e7\u0006U\u000b\u0000\u03e7\u00bb\u0001\u0000\u0000\u0000\u03e8\u03e9"+ + "\u0003D\u001a\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006"+ + "V\u000b\u0000\u03eb\u00bd\u0001\u0000\u0000\u0000\u03ec\u03ed\u0003F\u001b"+ + "\u0000\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006W\u000b\u0000"+ + "\u03ef\u00bf\u0001\u0000\u0000\u0000\u03f0\u03f1\u0003\u00b0P\u0000\u03f1"+ + "\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006X\u000e\u0000\u03f3\u03f4"+ + "\u0006X\u000f\u0000\u03f4\u00c1\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ + "H\u001c\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006Y\u0010"+ + "\u0000\u03f8\u03f9\u0006Y\f\u0000\u03f9\u00c3\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u0003F\u001b\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0006Z\u000b\u0000\u03fd\u00c5\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0003B\u0019\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ + "[\u000b\u0000\u0401\u00c7\u0001\u0000\u0000\u0000\u0402\u0403\u0003D\u001a"+ + "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006\\\u000b\u0000"+ + "\u0405\u00c9\u0001\u0000\u0000\u0000\u0406\u0407\u0003H\u001c\u0000\u0407"+ + "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006]\u0010\u0000\u0409\u040a"+ + "\u0006]\f\u0000\u040a\u00cb\u0001\u0000\u0000\u0000\u040b\u040c\u0003"+ + "\u00b0P\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006^\u000e"+ + "\u0000\u040e\u00cd\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00b2Q\u0000"+ + "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006_\u0011\u0000\u0412"+ + "\u00cf\u0001\u0000\u0000\u0000\u0413\u0414\u0003n/\u0000\u0414\u0415\u0001"+ + "\u0000\u0000\u0000\u0415\u0416\u0006`\u0012\u0000\u0416\u00d1\u0001\u0000"+ + "\u0000\u0000\u0417\u0418\u0003p0\u0000\u0418\u0419\u0001\u0000\u0000\u0000"+ + "\u0419\u041a\u0006a\u0013\u0000\u041a\u00d3\u0001\u0000\u0000\u0000\u041b"+ + "\u041c\u0003j-\u0000\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006"+ + "b\u0014\u0000\u041e\u00d5\u0001\u0000\u0000\u0000\u041f\u0420\u0007\u0010"+ + "\u0000\u0000\u0420\u0421\u0007\u0003\u0000\u0000\u0421\u0422\u0007\u0005"+ + "\u0000\u0000\u0422\u0423\u0007\f\u0000\u0000\u0423\u0424\u0007\u0000\u0000"+ + "\u0000\u0424\u0425\u0007\f\u0000\u0000\u0425\u0426\u0007\u0005\u0000\u0000"+ + 
"\u0426\u0427\u0007\f\u0000\u0000\u0427\u00d7\u0001\u0000\u0000\u0000\u0428"+ + "\u042c\b!\u0000\u0000\u0429\u042a\u0005/\u0000\u0000\u042a\u042c\b\"\u0000"+ + "\u0000\u042b\u0428\u0001\u0000\u0000\u0000\u042b\u0429\u0001\u0000\u0000"+ + "\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042f\u0003\u00d8d\u0000"+ + "\u042e\u042d\u0001\u0000\u0000\u0000\u042f\u0430\u0001\u0000\u0000\u0000"+ + "\u0430\u042e\u0001\u0000\u0000\u0000\u0430\u0431\u0001\u0000\u0000\u0000"+ + "\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0433\u0003\u00dae\u0000\u0433"+ + "\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006f\u0015\u0000\u0435\u00dd"+ + "\u0001\u0000\u0000\u0000\u0436\u0437\u0003^\'\u0000\u0437\u0438\u0001"+ + "\u0000\u0000\u0000\u0438\u0439\u0006g\u0016\u0000\u0439\u00df\u0001\u0000"+ + "\u0000\u0000\u043a\u043b\u0003B\u0019\u0000\u043b\u043c\u0001\u0000\u0000"+ + "\u0000\u043c\u043d\u0006h\u000b\u0000\u043d\u00e1\u0001\u0000\u0000\u0000"+ + "\u043e\u043f\u0003D\u001a\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ + "\u0441\u0006i\u000b\u0000\u0441\u00e3\u0001\u0000\u0000\u0000\u0442\u0443"+ + "\u0003F\u001b\u0000\u0443\u0444\u0001\u0000\u0000\u0000\u0444\u0445\u0006"+ + "j\u000b\u0000\u0445\u00e5\u0001\u0000\u0000\u0000\u0446\u0447\u0003H\u001c"+ + "\u0000\u0447\u0448\u0001\u0000\u0000\u0000\u0448\u0449\u0006k\u0010\u0000"+ + "\u0449\u044a\u0006k\f\u0000\u044a\u00e7\u0001\u0000\u0000\u0000\u044b"+ + "\u044c\u0003t2\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006"+ + "l\u0017\u0000\u044e\u00e9\u0001\u0000\u0000\u0000\u044f\u0450\u0003p0"+ + "\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006m\u0013\u0000"+ + "\u0452\u00eb\u0001\u0000\u0000\u0000\u0453\u0454\u0004n\b\u0000\u0454"+ + "\u0455\u0003\u008c>\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457"+ + "\u0006n\u0018\u0000\u0457\u00ed\u0001\u0000\u0000\u0000\u0458\u0459\u0004"+ + "o\t\u0000\u0459\u045a\u0003\u00aeO\u0000\u045a\u045b\u0001\u0000\u0000"+ + "\u0000\u045b\u045c\u0006o\u0019\u0000\u045c\u00ef\u0001\u0000\u0000\u0000"+ + "\u045d\u0462\u0003L\u001e\u0000\u045e\u0462\u0003J\u001d\u0000\u045f\u0462"+ + "\u0003Z%\u0000\u0460\u0462\u0003\u00a6K\u0000\u0461\u045d\u0001\u0000"+ + "\u0000\u0000\u0461\u045e\u0001\u0000\u0000\u0000\u0461\u045f\u0001\u0000"+ + "\u0000\u0000\u0461\u0460\u0001\u0000\u0000\u0000\u0462\u00f1\u0001\u0000"+ + "\u0000\u0000\u0463\u0466\u0003L\u001e\u0000\u0464\u0466\u0003\u00a6K\u0000"+ + "\u0465\u0463\u0001\u0000\u0000\u0000\u0465\u0464\u0001\u0000\u0000\u0000"+ + "\u0466\u046a\u0001\u0000\u0000\u0000\u0467\u0469\u0003\u00f0p\u0000\u0468"+ + "\u0467\u0001\u0000\u0000\u0000\u0469\u046c\u0001\u0000\u0000\u0000\u046a"+ + "\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b"+ + "\u0477\u0001\u0000\u0000\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046d"+ + "\u0470\u0003Z%\u0000\u046e\u0470\u0003T\"\u0000\u046f\u046d\u0001\u0000"+ + "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0472\u0001\u0000"+ + "\u0000\u0000\u0471\u0473\u0003\u00f0p\u0000\u0472\u0471\u0001\u0000\u0000"+ + "\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0472\u0001\u0000\u0000"+ + "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0477\u0001\u0000\u0000"+ + "\u0000\u0476\u0465\u0001\u0000\u0000\u0000\u0476\u046f\u0001\u0000\u0000"+ + "\u0000\u0477\u00f3\u0001\u0000\u0000\u0000\u0478\u047b\u0003\u00f2q\u0000"+ + "\u0479\u047b\u0003\u00b6S\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047a"+ + "\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c"+ + 
"\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d"+ + "\u00f5\u0001\u0000\u0000\u0000\u047e\u047f\u0003B\u0019\u0000\u047f\u0480"+ + "\u0001\u0000\u0000\u0000\u0480\u0481\u0006s\u000b\u0000\u0481\u00f7\u0001"+ + "\u0000\u0000\u0000\u0482\u0483\u0003D\u001a\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0006t\u000b\u0000\u0485\u00f9\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0003F\u001b\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0006u\u000b\u0000\u0489\u00fb\u0001\u0000\u0000\u0000\u048a"+ + "\u048b\u0003H\u001c\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d"+ + "\u0006v\u0010\u0000\u048d\u048e\u0006v\f\u0000\u048e\u00fd\u0001\u0000"+ + "\u0000\u0000\u048f\u0490\u0003j-\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + "\u0491\u0492\u0006w\u0014\u0000\u0492\u00ff\u0001\u0000\u0000\u0000\u0493"+ + "\u0494\u0003p0\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ + "x\u0013\u0000\u0496\u0101\u0001\u0000\u0000\u0000\u0497\u0498\u0003t2"+ + "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006y\u0017\u0000"+ + "\u049a\u0103\u0001\u0000\u0000\u0000\u049b\u049c\u0004z\n\u0000\u049c"+ + "\u049d\u0003\u008c>\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f"+ + "\u0006z\u0018\u0000\u049f\u0105\u0001\u0000\u0000\u0000\u04a0\u04a1\u0004"+ + "{\u000b\u0000\u04a1\u04a2\u0003\u00aeO\u0000\u04a2\u04a3\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0006{\u0019\u0000\u04a4\u0107\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0007\f\u0000\u0000\u04a6\u04a7\u0007\u0002\u0000\u0000\u04a7"+ + "\u0109\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003\u00f4r\u0000\u04a9\u04aa"+ + "\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006}\u001a\u0000\u04ab\u010b\u0001"+ + "\u0000\u0000\u0000\u04ac\u04ad\u0003B\u0019\u0000\u04ad\u04ae\u0001\u0000"+ + "\u0000\u0000\u04ae\u04af\u0006~\u000b\u0000\u04af\u010d\u0001\u0000\u0000"+ + "\u0000\u04b0\u04b1\u0003D\u001a\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000"+ + "\u04b2\u04b3\u0006\u007f\u000b\u0000\u04b3\u010f\u0001\u0000\u0000\u0000"+ + "\u04b4\u04b5\u0003F\u001b\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0006\u0080\u000b\u0000\u04b7\u0111\u0001\u0000\u0000\u0000\u04b8"+ + "\u04b9\u0003H\u001c\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + "\u0006\u0081\u0010\u0000\u04bb\u04bc\u0006\u0081\f\u0000\u04bc\u0113\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0003\u00b0P\u0000\u04be\u04bf\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0006\u0082\u000e\u0000\u04c0\u04c1\u0006\u0082"+ + "\u001b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000\u04c2\u04c3\u0007\u0007"+ + "\u0000\u0000\u04c3\u04c4\u0007\t\u0000\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u0083\u001c\u0000\u04c6\u0117\u0001\u0000\u0000"+ + "\u0000\u04c7\u04c8\u0007\u0013\u0000\u0000\u04c8\u04c9\u0007\u0001\u0000"+ + "\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000\u04ca\u04cb\u0007\n\u0000\u0000"+ + "\u04cb\u04cc\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000"+ + "\u04cd\u0119\u0001\u0000\u0000\u0000\u04ce\u04cf\b#\u0000\u0000\u04cf"+ + "\u011b\u0001\u0000\u0000\u0000\u04d0\u04d2\u0003\u011a\u0085\u0000\u04d1"+ + "\u04d0\u0001\u0000\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3"+ + "\u04d1\u0001\u0000\u0000\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ + "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003n/\u0000\u04d6\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d7\u04d1\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d8\u04da\u0001\u0000\u0000\u0000\u04d9\u04db\u0003"+ + 
"\u011a\u0085\u0000\u04da\u04d9\u0001\u0000\u0000\u0000\u04db\u04dc\u0001"+ + "\u0000\u0000\u0000\u04dc\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001"+ + "\u0000\u0000\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\u0003"+ + "\u011c\u0086\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1\u0006"+ + "\u0087\u001d\u0000\u04e1\u011f\u0001\u0000\u0000\u0000\u04e2\u04e3\u0003"+ + "B\u0019\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006\u0088"+ + "\u000b\u0000\u04e5\u0121\u0001\u0000\u0000\u0000\u04e6\u04e7\u0003D\u001a"+ + "\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8\u04e9\u0006\u0089\u000b"+ + "\u0000\u04e9\u0123\u0001\u0000\u0000\u0000\u04ea\u04eb\u0003F\u001b\u0000"+ + "\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed\u0006\u008a\u000b\u0000"+ + "\u04ed\u0125\u0001\u0000\u0000\u0000\u04ee\u04ef\u0003H\u001c\u0000\u04ef"+ + "\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006\u008b\u0010\u0000\u04f1"+ + "\u04f2\u0006\u008b\f\u0000\u04f2\u04f3\u0006\u008b\f\u0000\u04f3\u0127"+ + "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003j-\u0000\u04f5\u04f6\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0006\u008c\u0014\u0000\u04f7\u0129\u0001\u0000"+ + "\u0000\u0000\u04f8\u04f9\u0003p0\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ + "\u04fa\u04fb\u0006\u008d\u0013\u0000\u04fb\u012b\u0001\u0000\u0000\u0000"+ + "\u04fc\u04fd\u0003t2\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff"+ + "\u0006\u008e\u0017\u0000\u04ff\u012d\u0001\u0000\u0000\u0000\u0500\u0501"+ + "\u0003\u0118\u0084\u0000\u0501\u0502\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u0006\u008f\u001e\u0000\u0503\u012f\u0001\u0000\u0000\u0000\u0504\u0505"+ + "\u0003\u00f4r\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507\u0006"+ + "\u0090\u001a\u0000\u0507\u0131\u0001\u0000\u0000\u0000\u0508\u0509\u0003"+ + "\u00b8T\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006\u0091"+ + "\u001f\u0000\u050b\u0133\u0001\u0000\u0000\u0000\u050c\u050d\u0004\u0092"+ + "\f\u0000\u050d\u050e\u0003\u008c>\u0000\u050e\u050f\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0006\u0092\u0018\u0000\u0510\u0135\u0001\u0000\u0000"+ + "\u0000\u0511\u0512\u0004\u0093\r\u0000\u0512\u0513\u0003\u00aeO\u0000"+ + "\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u0093\u0019\u0000"+ + "\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0003B\u0019\u0000\u0517"+ + "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0094\u000b\u0000\u0519"+ + "\u0139\u0001\u0000\u0000\u0000\u051a\u051b\u0003D\u001a\u0000\u051b\u051c"+ + "\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u0095\u000b\u0000\u051d\u013b"+ + "\u0001\u0000\u0000\u0000\u051e\u051f\u0003F\u001b\u0000\u051f\u0520\u0001"+ + "\u0000\u0000\u0000\u0520\u0521\u0006\u0096\u000b\u0000\u0521\u013d\u0001"+ + "\u0000\u0000\u0000\u0522\u0523\u0003H\u001c\u0000\u0523\u0524\u0001\u0000"+ + "\u0000\u0000\u0524\u0525\u0006\u0097\u0010\u0000\u0525\u0526\u0006\u0097"+ + "\f\u0000\u0526\u013f\u0001\u0000\u0000\u0000\u0527\u0528\u0003t2\u0000"+ + "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u0098\u0017\u0000"+ + "\u052a\u0141\u0001\u0000\u0000\u0000\u052b\u052c\u0004\u0099\u000e\u0000"+ + "\u052c\u052d\u0003\u008c>\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e"+ + "\u052f\u0006\u0099\u0018\u0000\u052f\u0143\u0001\u0000\u0000\u0000\u0530"+ + "\u0531\u0004\u009a\u000f\u0000\u0531\u0532\u0003\u00aeO\u0000\u0532\u0533"+ + "\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0019\u0000\u0534\u0145"+ + "\u0001\u0000\u0000\u0000\u0535\u0536\u0003\u00b8T\u0000\u0536\u0537\u0001"+ + 
"\u0000\u0000\u0000\u0537\u0538\u0006\u009b\u001f\u0000\u0538\u0147\u0001"+ + "\u0000\u0000\u0000\u0539\u053a\u0003\u00b4R\u0000\u053a\u053b\u0001\u0000"+ + "\u0000\u0000\u053b\u053c\u0006\u009c \u0000\u053c\u0149\u0001\u0000\u0000"+ + "\u0000\u053d\u053e\u0003B\u0019\u0000\u053e\u053f\u0001\u0000\u0000\u0000"+ + "\u053f\u0540\u0006\u009d\u000b\u0000\u0540\u014b\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0003D\u001a\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543"+ + "\u0544\u0006\u009e\u000b\u0000\u0544\u014d\u0001\u0000\u0000\u0000\u0545"+ + "\u0546\u0003F\u001b\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548"+ + "\u0006\u009f\u000b\u0000\u0548\u014f\u0001\u0000\u0000\u0000\u0549\u054a"+ + "\u0003H\u001c\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006"+ + "\u00a0\u0010\u0000\u054c\u054d\u0006\u00a0\f\u0000\u054d\u0151\u0001\u0000"+ + "\u0000\u0000\u054e\u054f\u0007\u0001\u0000\u0000\u054f\u0550\u0007\t\u0000"+ + "\u0000\u0550\u0551\u0007\u000f\u0000\u0000\u0551\u0552\u0007\u0007\u0000"+ + "\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553\u0554\u0003B\u0019\u0000"+ + "\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556\u0006\u00a2\u000b\u0000"+ + "\u0556\u0155\u0001\u0000\u0000\u0000\u0557\u0558\u0003D\u001a\u0000\u0558"+ + "\u0559\u0001\u0000\u0000\u0000\u0559\u055a\u0006\u00a3\u000b\u0000\u055a"+ + "\u0157\u0001\u0000\u0000\u0000\u055b\u055c\u0003F\u001b\u0000\u055c\u055d"+ + "\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u00a4\u000b\u0000\u055e\u0159"+ + "\u0001\u0000\u0000\u0000\u055f\u0560\u0003\u00b2Q\u0000\u0560\u0561\u0001"+ + "\u0000\u0000\u0000\u0561\u0562\u0006\u00a5\u0011\u0000\u0562\u0563\u0006"+ + "\u00a5\f\u0000\u0563\u015b\u0001\u0000\u0000\u0000\u0564\u0565\u0003n"+ + "/\u0000\u0565\u0566\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00a6\u0012"+ + "\u0000\u0567\u015d\u0001\u0000\u0000\u0000\u0568\u056e\u0003T\"\u0000"+ + "\u0569\u056e\u0003J\u001d\u0000\u056a\u056e\u0003t2\u0000\u056b\u056e"+ + "\u0003L\u001e\u0000\u056c\u056e\u0003Z%\u0000\u056d\u0568\u0001\u0000"+ + "\u0000\u0000\u056d\u0569\u0001\u0000\u0000\u0000\u056d\u056a\u0001\u0000"+ + "\u0000\u0000\u056d\u056b\u0001\u0000\u0000\u0000\u056d\u056c\u0001\u0000"+ + "\u0000\u0000\u056e\u056f\u0001\u0000\u0000\u0000\u056f\u056d\u0001\u0000"+ + "\u0000\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u015f\u0001\u0000"+ + "\u0000\u0000\u0571\u0572\u0003B\u0019\u0000\u0572\u0573\u0001\u0000\u0000"+ + "\u0000\u0573\u0574\u0006\u00a8\u000b\u0000\u0574\u0161\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0003D\u001a\u0000\u0576\u0577\u0001\u0000\u0000\u0000"+ + "\u0577\u0578\u0006\u00a9\u000b\u0000\u0578\u0163\u0001\u0000\u0000\u0000"+ + "\u0579\u057a\u0003F\u001b\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ + "\u057c\u0006\u00aa\u000b\u0000\u057c\u0165\u0001\u0000\u0000\u0000\u057d"+ + "\u057e\u0003H\u001c\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580"+ + "\u0006\u00ab\u0010\u0000\u0580\u0581\u0006\u00ab\f\u0000\u0581\u0167\u0001"+ + "\u0000\u0000\u0000\u0582\u0583\u0003n/\u0000\u0583\u0584\u0001\u0000\u0000"+ + "\u0000\u0584\u0585\u0006\u00ac\u0012\u0000\u0585\u0169\u0001\u0000\u0000"+ + "\u0000\u0586\u0587\u0003p0\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ + "\u0589\u0006\u00ad\u0013\u0000\u0589\u016b\u0001\u0000\u0000\u0000\u058a"+ + "\u058b\u0003t2\u0000\u058b\u058c\u0001\u0000\u0000\u0000\u058c\u058d\u0006"+ + "\u00ae\u0017\u0000\u058d\u016d\u0001\u0000\u0000\u0000\u058e\u058f\u0003"+ + "\u0116\u0083\u0000\u058f\u0590\u0001\u0000\u0000\u0000\u0590\u0591\u0006"+ + 
"\u00af!\u0000\u0591\u0592\u0006\u00af\"\u0000\u0592\u016f\u0001\u0000"+ + "\u0000\u0000\u0593\u0594\u0003\u00dae\u0000\u0594\u0595\u0001\u0000\u0000"+ + "\u0000\u0595\u0596\u0006\u00b0\u0015\u0000\u0596\u0171\u0001\u0000\u0000"+ + "\u0000\u0597\u0598\u0003^\'\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ + "\u0599\u059a\u0006\u00b1\u0016\u0000\u059a\u0173\u0001\u0000\u0000\u0000"+ + "\u059b\u059c\u0003B\u0019\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ + "\u059e\u0006\u00b2\u000b\u0000\u059e\u0175\u0001\u0000\u0000\u0000\u059f"+ + "\u05a0\u0003D\u001a\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2"+ + "\u0006\u00b3\u000b\u0000\u05a2\u0177\u0001\u0000\u0000\u0000\u05a3\u05a4"+ + "\u0003F\u001b\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006"+ + "\u00b4\u000b\u0000\u05a6\u0179\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003"+ + "H\u001c\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b5"+ + "\u0010\u0000\u05aa\u05ab\u0006\u00b5\f\u0000\u05ab\u05ac\u0006\u00b5\f"+ + "\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003p0\u0000\u05ae"+ + "\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006\u00b6\u0013\u0000\u05b0"+ + "\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003t2\u0000\u05b2\u05b3\u0001"+ + "\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7\u0017\u0000\u05b4\u017f\u0001"+ + "\u0000\u0000\u0000\u05b5\u05b6\u0003\u00f4r\u0000\u05b6\u05b7\u0001\u0000"+ + "\u0000\u0000\u05b7\u05b8\u0006\u00b8\u001a\u0000\u05b8\u0181\u0001\u0000"+ + "\u0000\u0000\u05b9\u05ba\u0003B\u0019\u0000\u05ba\u05bb\u0001\u0000\u0000"+ + "\u0000\u05bb\u05bc\u0006\u00b9\u000b\u0000\u05bc\u0183\u0001\u0000\u0000"+ + "\u0000\u05bd\u05be\u0003D\u001a\u0000\u05be\u05bf\u0001\u0000\u0000\u0000"+ + "\u05bf\u05c0\u0006\u00ba\u000b\u0000\u05c0\u0185\u0001\u0000\u0000\u0000"+ + "\u05c1\u05c2\u0003F\u001b\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3"+ + "\u05c4\u0006\u00bb\u000b\u0000\u05c4\u0187\u0001\u0000\u0000\u0000\u05c5"+ + "\u05c6\u0003H\u001c\u0000\u05c6\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8"+ + "\u0006\u00bc\u0010\u0000\u05c8\u05c9\u0006\u00bc\f\u0000\u05c9\u0189\u0001"+ + "\u0000\u0000\u0000\u05ca\u05cb\u00036\u0013\u0000\u05cb\u05cc\u0001\u0000"+ + "\u0000\u0000\u05cc\u05cd\u0006\u00bd#\u0000\u05cd\u018b\u0001\u0000\u0000"+ + "\u0000\u05ce\u05cf\u0003\u0108|\u0000\u05cf\u05d0\u0001\u0000\u0000\u0000"+ + "\u05d0\u05d1\u0006\u00be$\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2"+ + "\u05d3\u0003\u0116\u0083\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4"+ + "\u05d5\u0006\u00bf!\u0000\u05d5\u05d6\u0006\u00bf\f\u0000\u05d6\u05d7"+ + "\u0006\u00bf\u0000\u0000\u05d7\u018f\u0001\u0000\u0000\u0000\u05d8\u05d9"+ + "\u0007\u0014\u0000\u0000\u05d9\u05da\u0007\u0002\u0000\u0000\u05da\u05db"+ + "\u0007\u0001\u0000\u0000\u05db\u05dc\u0007\t\u0000\u0000\u05dc\u05dd\u0007"+ + "\u0011\u0000\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de\u05df\u0006"+ + "\u00c0\f\u0000\u05df\u05e0\u0006\u00c0\u0000\u0000\u05e0\u0191\u0001\u0000"+ + "\u0000\u0000\u05e1\u05e2\u0003\u00b4R\u0000\u05e2\u05e3\u0001\u0000\u0000"+ + "\u0000\u05e3\u05e4\u0006\u00c1 \u0000\u05e4\u0193\u0001\u0000\u0000\u0000"+ + "\u05e5\u05e6\u0003\u00b8T\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7"+ + "\u05e8\u0006\u00c2\u001f\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9"+ + "\u05ea\u0003B\u0019\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec"+ + "\u0006\u00c3\u000b\u0000\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee"+ + "\u0003D\u001a\u0000\u05ee\u05ef\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006"+ + 
"\u00c4\u000b\u0000\u05f0\u0199\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003"+ + "F\u001b\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5"+ + "\u000b\u0000\u05f4\u019b\u0001\u0000\u0000\u0000\u05f5\u05f6\u0003H\u001c"+ + "\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7\u05f8\u0006\u00c6\u0010"+ + "\u0000\u05f8\u05f9\u0006\u00c6\f\u0000\u05f9\u019d\u0001\u0000\u0000\u0000"+ + "\u05fa\u05fb\u0003\u00dae\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc"+ + "\u05fd\u0006\u00c7\u0015\u0000\u05fd\u05fe\u0006\u00c7\f\u0000\u05fe\u05ff"+ + "\u0006\u00c7%\u0000\u05ff\u019f\u0001\u0000\u0000\u0000\u0600\u0601\u0003"+ + "^\'\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006\u00c8"+ + "\u0016\u0000\u0603\u0604\u0006\u00c8\f\u0000\u0604\u0605\u0006\u00c8%"+ + "\u0000\u0605\u01a1\u0001\u0000\u0000\u0000\u0606\u0607\u0003B\u0019\u0000"+ + "\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609\u0006\u00c9\u000b\u0000"+ + "\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003D\u001a\u0000\u060b"+ + "\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca\u000b\u0000\u060d"+ + "\u01a5\u0001\u0000\u0000\u0000\u060e\u060f\u0003F\u001b\u0000\u060f\u0610"+ + "\u0001\u0000\u0000\u0000\u0610\u0611\u0006\u00cb\u000b\u0000\u0611\u01a7"+ + "\u0001\u0000\u0000\u0000\u0612\u0613\u0003n/\u0000\u0613\u0614\u0001\u0000"+ + "\u0000\u0000\u0614\u0615\u0006\u00cc\u0012\u0000\u0615\u0616\u0006\u00cc"+ + "\f\u0000\u0616\u0617\u0006\u00cc\t\u0000\u0617\u01a9\u0001\u0000\u0000"+ + "\u0000\u0618\u0619\u0003p0\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a"+ + "\u061b\u0006\u00cd\u0013\u0000\u061b\u061c\u0006\u00cd\f\u0000\u061c\u061d"+ + "\u0006\u00cd\t\u0000\u061d\u01ab\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ + "B\u0019\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00ce"+ + "\u000b\u0000\u0621\u01ad\u0001\u0000\u0000\u0000\u0622\u0623\u0003D\u001a"+ + "\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624\u0625\u0006\u00cf\u000b"+ + "\u0000\u0625\u01af\u0001\u0000\u0000\u0000\u0626\u0627\u0003F\u001b\u0000"+ + "\u0627\u0628\u0001\u0000\u0000\u0000\u0628\u0629\u0006\u00d0\u000b\u0000"+ + "\u0629\u01b1\u0001\u0000\u0000\u0000\u062a\u062b\u0003\u00b8T\u0000\u062b"+ + "\u062c\u0001\u0000\u0000\u0000\u062c\u062d\u0006\u00d1\f\u0000\u062d\u062e"+ + "\u0006\u00d1\u0000\u0000\u062e\u062f\u0006\u00d1\u001f\u0000\u062f\u01b3"+ + "\u0001\u0000\u0000\u0000\u0630\u0631\u0003\u00b4R\u0000\u0631\u0632\u0001"+ + "\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\f\u0000\u0633\u0634\u0006\u00d2"+ + "\u0000\u0000\u0634\u0635\u0006\u00d2 \u0000\u0635\u01b5\u0001\u0000\u0000"+ + "\u0000\u0636\u0637\u0003d*\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638"+ + "\u0639\u0006\u00d3\f\u0000\u0639\u063a\u0006\u00d3\u0000\u0000\u063a\u063b"+ + "\u0006\u00d3&\u0000\u063b\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003"+ + "H\u001c\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4"+ + "\u0010\u0000\u063f\u0640\u0006\u00d4\f\u0000\u0640\u01b9\u0001\u0000\u0000"+ + "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u000e\u000f\u028f\u0299\u029d\u02a0\u02a9\u02ab\u02b6\u02c9\u02ce\u02d7"+ + "\u02de\u02e3\u02e5\u02f0\u02f8\u02fb\u02fd\u0302\u0307\u030d\u0314\u0319"+ + "\u031f\u0322\u032a\u032e\u03ae\u03b3\u03ba\u03bc\u03cc\u03d1\u03d6\u03d8"+ + "\u03de\u042b\u0430\u0461\u0465\u046a\u046f\u0474\u0476\u047a\u047c\u04d3"+ + "\u04d7\u04dc\u056d\u056f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + 
"\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ + "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007F\u0000\u0005\u0000"+ + "\u0000\u0007\u001d\u0000\u0007G\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ + "$\u0000\u0007Q\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ + "E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000\u0007"+ + "^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007\u0014"+ + "\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index a2b339f378f12..50493f584fe4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -23,7 +23,11 @@ null null null null -':' +null +null +null +null +null '|' null null @@ -33,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' @@ -113,6 +118,10 @@ null null null null +'USING' +null +null +null null null null @@ -141,11 +150,15 @@ WHERE DEV_INLINESTATS DEV_LOOKUP DEV_METRICS +DEV_JOIN +DEV_JOIN_FULL +DEV_JOIN_LEFT +DEV_JOIN_RIGHT +DEV_JOIN_LOOKUP UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -155,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -235,6 +249,10 @@ LOOKUP_WS LOOKUP_FIELD_LINE_COMMENT LOOKUP_FIELD_MULTILINE_COMMENT LOOKUP_FIELD_WS +USING +JOIN_LINE_COMMENT +JOIN_MULTILINE_COMMENT +JOIN_WS METRICS_LINE_COMMENT METRICS_MULTILINE_COMMENT METRICS_WS @@ -305,7 +323,11 @@ enrichCommand enrichWithClause lookupCommand inlinestatsCommand +joinCommand +joinTarget +joinCondition +joinPredicate atn: -[4, 1, 119, 603, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 134, 8, 1, 10, 1, 12, 1, 137, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 145, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 163, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 182, 8, 5, 10, 5, 12, 5, 185, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 192, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 206, 8, 5, 10, 5, 12, 5, 209, 9, 5, 1, 6, 1, 6, 3, 6, 213, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 
8, 236, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 242, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 250, 8, 9, 10, 9, 12, 9, 253, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 263, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 268, 8, 10, 10, 10, 12, 10, 271, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 279, 8, 11, 10, 11, 12, 11, 282, 9, 11, 3, 11, 284, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 298, 8, 15, 10, 15, 12, 15, 301, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 306, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 314, 8, 17, 10, 17, 12, 17, 317, 9, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 325, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 335, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 341, 8, 22, 10, 22, 12, 22, 344, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 354, 8, 24, 10, 24, 12, 24, 357, 9, 24, 1, 24, 3, 24, 360, 8, 24, 1, 24, 1, 24, 3, 24, 364, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 371, 8, 26, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 380, 8, 27, 10, 27, 12, 27, 383, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 388, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 393, 8, 29, 10, 29, 12, 29, 396, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 401, 8, 30, 10, 30, 12, 30, 404, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 409, 8, 31, 10, 31, 12, 31, 412, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 419, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 434, 8, 34, 10, 34, 12, 34, 437, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 445, 8, 34, 10, 34, 12, 34, 448, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 456, 8, 34, 10, 34, 12, 34, 459, 9, 34, 1, 34, 1, 34, 3, 34, 463, 8, 34, 1, 35, 1, 35, 3, 35, 467, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 472, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 481, 8, 38, 10, 38, 12, 38, 484, 9, 38, 1, 39, 1, 39, 3, 39, 488, 8, 39, 1, 39, 1, 39, 3, 39, 492, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 504, 8, 42, 10, 42, 12, 42, 507, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 517, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 529, 8, 47, 10, 47, 12, 47, 532, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 542, 8, 50, 1, 51, 3, 51, 545, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 550, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 572, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 578, 8, 58, 10, 58, 12, 58, 581, 9, 58, 3, 58, 583, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 588, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 601, 8, 61, 1, 61, 0, 4, 2, 10, 18, 20, 62, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 628, 0, 124, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 4, 144, 1, 0, 0, 0, 6, 162, 1, 0, 0, 0, 8, 164, 1, 0, 0, 0, 10, 197, 1, 0, 0, 0, 12, 224, 1, 0, 0, 0, 14, 226, 1, 0, 0, 0, 16, 235, 1, 0, 0, 0, 18, 241, 1, 0, 0, 0, 
20, 262, 1, 0, 0, 0, 22, 272, 1, 0, 0, 0, 24, 287, 1, 0, 0, 0, 26, 289, 1, 0, 0, 0, 28, 291, 1, 0, 0, 0, 30, 294, 1, 0, 0, 0, 32, 305, 1, 0, 0, 0, 34, 309, 1, 0, 0, 0, 36, 324, 1, 0, 0, 0, 38, 328, 1, 0, 0, 0, 40, 330, 1, 0, 0, 0, 42, 334, 1, 0, 0, 0, 44, 336, 1, 0, 0, 0, 46, 345, 1, 0, 0, 0, 48, 349, 1, 0, 0, 0, 50, 365, 1, 0, 0, 0, 52, 368, 1, 0, 0, 0, 54, 376, 1, 0, 0, 0, 56, 384, 1, 0, 0, 0, 58, 389, 1, 0, 0, 0, 60, 397, 1, 0, 0, 0, 62, 405, 1, 0, 0, 0, 64, 413, 1, 0, 0, 0, 66, 418, 1, 0, 0, 0, 68, 462, 1, 0, 0, 0, 70, 466, 1, 0, 0, 0, 72, 471, 1, 0, 0, 0, 74, 473, 1, 0, 0, 0, 76, 476, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 493, 1, 0, 0, 0, 82, 496, 1, 0, 0, 0, 84, 499, 1, 0, 0, 0, 86, 508, 1, 0, 0, 0, 88, 512, 1, 0, 0, 0, 90, 518, 1, 0, 0, 0, 92, 522, 1, 0, 0, 0, 94, 525, 1, 0, 0, 0, 96, 533, 1, 0, 0, 0, 98, 537, 1, 0, 0, 0, 100, 541, 1, 0, 0, 0, 102, 544, 1, 0, 0, 0, 104, 549, 1, 0, 0, 0, 106, 553, 1, 0, 0, 0, 108, 555, 1, 0, 0, 0, 110, 557, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 564, 1, 0, 0, 0, 116, 567, 1, 0, 0, 0, 118, 587, 1, 0, 0, 0, 120, 591, 1, 0, 0, 0, 122, 596, 1, 0, 0, 0, 124, 125, 3, 2, 1, 0, 125, 126, 5, 0, 0, 1, 126, 1, 1, 0, 0, 0, 127, 128, 6, 1, -1, 0, 128, 129, 3, 4, 2, 0, 129, 135, 1, 0, 0, 0, 130, 131, 10, 1, 0, 0, 131, 132, 5, 25, 0, 0, 132, 134, 3, 6, 3, 0, 133, 130, 1, 0, 0, 0, 134, 137, 1, 0, 0, 0, 135, 133, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 3, 1, 0, 0, 0, 137, 135, 1, 0, 0, 0, 138, 145, 3, 110, 55, 0, 139, 145, 3, 34, 17, 0, 140, 145, 3, 28, 14, 0, 141, 145, 3, 114, 57, 0, 142, 143, 4, 2, 1, 0, 143, 145, 3, 48, 24, 0, 144, 138, 1, 0, 0, 0, 144, 139, 1, 0, 0, 0, 144, 140, 1, 0, 0, 0, 144, 141, 1, 0, 0, 0, 144, 142, 1, 0, 0, 0, 145, 5, 1, 0, 0, 0, 146, 163, 3, 50, 25, 0, 147, 163, 3, 8, 4, 0, 148, 163, 3, 80, 40, 0, 149, 163, 3, 74, 37, 0, 150, 163, 3, 52, 26, 0, 151, 163, 3, 76, 38, 0, 152, 163, 3, 82, 41, 0, 153, 163, 3, 84, 42, 0, 154, 163, 3, 88, 44, 0, 155, 163, 3, 90, 45, 0, 156, 163, 3, 116, 58, 0, 157, 163, 3, 92, 46, 0, 158, 159, 4, 3, 2, 0, 159, 163, 3, 122, 61, 0, 160, 161, 4, 3, 3, 0, 161, 163, 3, 120, 60, 0, 162, 146, 1, 0, 0, 0, 162, 147, 1, 0, 0, 0, 162, 148, 1, 0, 0, 0, 162, 149, 1, 0, 0, 0, 162, 150, 1, 0, 0, 0, 162, 151, 1, 0, 0, 0, 162, 152, 1, 0, 0, 0, 162, 153, 1, 0, 0, 0, 162, 154, 1, 0, 0, 0, 162, 155, 1, 0, 0, 0, 162, 156, 1, 0, 0, 0, 162, 157, 1, 0, 0, 0, 162, 158, 1, 0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 7, 1, 0, 0, 0, 164, 165, 5, 16, 0, 0, 165, 166, 3, 10, 5, 0, 166, 9, 1, 0, 0, 0, 167, 168, 6, 5, -1, 0, 168, 169, 5, 44, 0, 0, 169, 198, 3, 10, 5, 8, 170, 198, 3, 16, 8, 0, 171, 198, 3, 12, 6, 0, 172, 174, 3, 16, 8, 0, 173, 175, 5, 44, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 39, 0, 0, 177, 178, 5, 43, 0, 0, 178, 183, 3, 16, 8, 0, 179, 180, 5, 34, 0, 0, 180, 182, 3, 16, 8, 0, 181, 179, 1, 0, 0, 0, 182, 185, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 186, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 186, 187, 5, 50, 0, 0, 187, 198, 1, 0, 0, 0, 188, 189, 3, 16, 8, 0, 189, 191, 5, 40, 0, 0, 190, 192, 5, 44, 0, 0, 191, 190, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 1, 0, 0, 0, 193, 194, 5, 45, 0, 0, 194, 198, 1, 0, 0, 0, 195, 196, 4, 5, 4, 0, 196, 198, 3, 14, 7, 0, 197, 167, 1, 0, 0, 0, 197, 170, 1, 0, 0, 0, 197, 171, 1, 0, 0, 0, 197, 172, 1, 0, 0, 0, 197, 188, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 207, 1, 0, 0, 0, 199, 200, 10, 5, 0, 0, 200, 201, 5, 30, 0, 0, 201, 206, 3, 10, 5, 6, 202, 203, 10, 4, 0, 0, 203, 204, 5, 47, 0, 0, 204, 206, 3, 10, 5, 5, 205, 199, 1, 0, 0, 0, 205, 202, 1, 
0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 11, 1, 0, 0, 0, 209, 207, 1, 0, 0, 0, 210, 212, 3, 16, 8, 0, 211, 213, 5, 44, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 42, 0, 0, 215, 216, 3, 106, 53, 0, 216, 225, 1, 0, 0, 0, 217, 219, 3, 16, 8, 0, 218, 220, 5, 44, 0, 0, 219, 218, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 5, 49, 0, 0, 222, 223, 3, 106, 53, 0, 223, 225, 1, 0, 0, 0, 224, 210, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 225, 13, 1, 0, 0, 0, 226, 227, 3, 58, 29, 0, 227, 228, 5, 24, 0, 0, 228, 229, 3, 68, 34, 0, 229, 15, 1, 0, 0, 0, 230, 236, 3, 18, 9, 0, 231, 232, 3, 18, 9, 0, 232, 233, 3, 108, 54, 0, 233, 234, 3, 18, 9, 0, 234, 236, 1, 0, 0, 0, 235, 230, 1, 0, 0, 0, 235, 231, 1, 0, 0, 0, 236, 17, 1, 0, 0, 0, 237, 238, 6, 9, -1, 0, 238, 242, 3, 20, 10, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 3, 241, 237, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 251, 1, 0, 0, 0, 243, 244, 10, 2, 0, 0, 244, 245, 7, 1, 0, 0, 245, 250, 3, 18, 9, 3, 246, 247, 10, 1, 0, 0, 247, 248, 7, 0, 0, 0, 248, 250, 3, 18, 9, 2, 249, 243, 1, 0, 0, 0, 249, 246, 1, 0, 0, 0, 250, 253, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 251, 252, 1, 0, 0, 0, 252, 19, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 254, 255, 6, 10, -1, 0, 255, 263, 3, 68, 34, 0, 256, 263, 3, 58, 29, 0, 257, 263, 3, 22, 11, 0, 258, 259, 5, 43, 0, 0, 259, 260, 3, 10, 5, 0, 260, 261, 5, 50, 0, 0, 261, 263, 1, 0, 0, 0, 262, 254, 1, 0, 0, 0, 262, 256, 1, 0, 0, 0, 262, 257, 1, 0, 0, 0, 262, 258, 1, 0, 0, 0, 263, 269, 1, 0, 0, 0, 264, 265, 10, 1, 0, 0, 265, 266, 5, 33, 0, 0, 266, 268, 3, 26, 13, 0, 267, 264, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 21, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 3, 24, 12, 0, 273, 283, 5, 43, 0, 0, 274, 284, 5, 61, 0, 0, 275, 280, 3, 10, 5, 0, 276, 277, 5, 34, 0, 0, 277, 279, 3, 10, 5, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 274, 1, 0, 0, 0, 283, 275, 1, 0, 0, 0, 283, 284, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 286, 5, 50, 0, 0, 286, 23, 1, 0, 0, 0, 287, 288, 3, 72, 36, 0, 288, 25, 1, 0, 0, 0, 289, 290, 3, 64, 32, 0, 290, 27, 1, 0, 0, 0, 291, 292, 5, 12, 0, 0, 292, 293, 3, 30, 15, 0, 293, 29, 1, 0, 0, 0, 294, 299, 3, 32, 16, 0, 295, 296, 5, 34, 0, 0, 296, 298, 3, 32, 16, 0, 297, 295, 1, 0, 0, 0, 298, 301, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 31, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 302, 303, 3, 58, 29, 0, 303, 304, 5, 32, 0, 0, 304, 306, 1, 0, 0, 0, 305, 302, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 1, 0, 0, 0, 307, 308, 3, 10, 5, 0, 308, 33, 1, 0, 0, 0, 309, 310, 5, 6, 0, 0, 310, 315, 3, 36, 18, 0, 311, 312, 5, 34, 0, 0, 312, 314, 3, 36, 18, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 320, 3, 42, 21, 0, 319, 318, 1, 0, 0, 0, 319, 320, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 3, 38, 19, 0, 322, 323, 5, 24, 0, 0, 323, 325, 1, 0, 0, 0, 324, 321, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 327, 3, 40, 20, 0, 327, 37, 1, 0, 0, 0, 328, 329, 5, 76, 0, 0, 329, 39, 1, 0, 0, 0, 330, 331, 7, 2, 0, 0, 331, 41, 1, 0, 0, 0, 332, 335, 3, 44, 22, 0, 333, 335, 3, 46, 23, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, 0, 335, 43, 1, 0, 0, 0, 336, 337, 5, 75, 0, 0, 337, 342, 5, 76, 0, 0, 338, 339, 5, 34, 0, 0, 339, 341, 5, 76, 0, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 
0, 0, 342, 343, 1, 0, 0, 0, 343, 45, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 65, 0, 0, 346, 347, 3, 44, 22, 0, 347, 348, 5, 66, 0, 0, 348, 47, 1, 0, 0, 0, 349, 350, 5, 19, 0, 0, 350, 355, 3, 36, 18, 0, 351, 352, 5, 34, 0, 0, 352, 354, 3, 36, 18, 0, 353, 351, 1, 0, 0, 0, 354, 357, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 355, 356, 1, 0, 0, 0, 356, 359, 1, 0, 0, 0, 357, 355, 1, 0, 0, 0, 358, 360, 3, 54, 27, 0, 359, 358, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 363, 1, 0, 0, 0, 361, 362, 5, 29, 0, 0, 362, 364, 3, 30, 15, 0, 363, 361, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 49, 1, 0, 0, 0, 365, 366, 5, 4, 0, 0, 366, 367, 3, 30, 15, 0, 367, 51, 1, 0, 0, 0, 368, 370, 5, 15, 0, 0, 369, 371, 3, 54, 27, 0, 370, 369, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 374, 1, 0, 0, 0, 372, 373, 5, 29, 0, 0, 373, 375, 3, 30, 15, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 381, 3, 56, 28, 0, 377, 378, 5, 34, 0, 0, 378, 380, 3, 56, 28, 0, 379, 377, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 381, 382, 1, 0, 0, 0, 382, 55, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 384, 387, 3, 32, 16, 0, 385, 386, 5, 16, 0, 0, 386, 388, 3, 10, 5, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 394, 3, 72, 36, 0, 390, 391, 5, 36, 0, 0, 391, 393, 3, 72, 36, 0, 392, 390, 1, 0, 0, 0, 393, 396, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 59, 1, 0, 0, 0, 396, 394, 1, 0, 0, 0, 397, 402, 3, 66, 33, 0, 398, 399, 5, 36, 0, 0, 399, 401, 3, 66, 33, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 61, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 410, 3, 60, 30, 0, 406, 407, 5, 34, 0, 0, 407, 409, 3, 60, 30, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 63, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 7, 3, 0, 0, 414, 65, 1, 0, 0, 0, 415, 419, 5, 80, 0, 0, 416, 417, 4, 33, 10, 0, 417, 419, 3, 70, 35, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 419, 67, 1, 0, 0, 0, 420, 463, 5, 45, 0, 0, 421, 422, 3, 104, 52, 0, 422, 423, 5, 67, 0, 0, 423, 463, 1, 0, 0, 0, 424, 463, 3, 102, 51, 0, 425, 463, 3, 104, 52, 0, 426, 463, 3, 98, 49, 0, 427, 463, 3, 70, 35, 0, 428, 463, 3, 106, 53, 0, 429, 430, 5, 65, 0, 0, 430, 435, 3, 100, 50, 0, 431, 432, 5, 34, 0, 0, 432, 434, 3, 100, 50, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 438, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 66, 0, 0, 439, 463, 1, 0, 0, 0, 440, 441, 5, 65, 0, 0, 441, 446, 3, 98, 49, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 98, 49, 0, 444, 442, 1, 0, 0, 0, 445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 449, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 5, 66, 0, 0, 450, 463, 1, 0, 0, 0, 451, 452, 5, 65, 0, 0, 452, 457, 3, 106, 53, 0, 453, 454, 5, 34, 0, 0, 454, 456, 3, 106, 53, 0, 455, 453, 1, 0, 0, 0, 456, 459, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 460, 1, 0, 0, 0, 459, 457, 1, 0, 0, 0, 460, 461, 5, 66, 0, 0, 461, 463, 1, 0, 0, 0, 462, 420, 1, 0, 0, 0, 462, 421, 1, 0, 0, 0, 462, 424, 1, 0, 0, 0, 462, 425, 1, 0, 0, 0, 462, 426, 1, 0, 0, 0, 462, 427, 1, 0, 0, 0, 462, 428, 1, 0, 0, 0, 462, 429, 1, 0, 0, 0, 462, 440, 1, 0, 0, 0, 462, 451, 1, 0, 0, 0, 463, 69, 1, 0, 0, 0, 464, 467, 5, 48, 0, 0, 465, 467, 5, 64, 0, 0, 466, 464, 1, 0, 0, 0, 466, 465, 1, 0, 0, 0, 467, 71, 1, 0, 0, 0, 468, 472, 3, 64, 32, 0, 469, 470, 4, 36, 11, 0, 470, 472, 3, 70, 35, 0, 471, 468, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 472, 73, 1, 0, 0, 0, 473, 474, 5, 9, 0, 0, 474, 
475, 5, 27, 0, 0, 475, 75, 1, 0, 0, 0, 476, 477, 5, 14, 0, 0, 477, 482, 3, 78, 39, 0, 478, 479, 5, 34, 0, 0, 479, 481, 3, 78, 39, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 77, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 487, 3, 10, 5, 0, 486, 488, 7, 4, 0, 0, 487, 486, 1, 0, 0, 0, 487, 488, 1, 0, 0, 0, 488, 491, 1, 0, 0, 0, 489, 490, 5, 46, 0, 0, 490, 492, 7, 5, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 79, 1, 0, 0, 0, 493, 494, 5, 8, 0, 0, 494, 495, 3, 62, 31, 0, 495, 81, 1, 0, 0, 0, 496, 497, 5, 2, 0, 0, 497, 498, 3, 62, 31, 0, 498, 83, 1, 0, 0, 0, 499, 500, 5, 11, 0, 0, 500, 505, 3, 86, 43, 0, 501, 502, 5, 34, 0, 0, 502, 504, 3, 86, 43, 0, 503, 501, 1, 0, 0, 0, 504, 507, 1, 0, 0, 0, 505, 503, 1, 0, 0, 0, 505, 506, 1, 0, 0, 0, 506, 85, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 508, 509, 3, 60, 30, 0, 509, 510, 5, 84, 0, 0, 510, 511, 3, 60, 30, 0, 511, 87, 1, 0, 0, 0, 512, 513, 5, 1, 0, 0, 513, 514, 3, 20, 10, 0, 514, 516, 3, 106, 53, 0, 515, 517, 3, 94, 47, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 89, 1, 0, 0, 0, 518, 519, 5, 7, 0, 0, 519, 520, 3, 20, 10, 0, 520, 521, 3, 106, 53, 0, 521, 91, 1, 0, 0, 0, 522, 523, 5, 10, 0, 0, 523, 524, 3, 58, 29, 0, 524, 93, 1, 0, 0, 0, 525, 530, 3, 96, 48, 0, 526, 527, 5, 34, 0, 0, 527, 529, 3, 96, 48, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 95, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 64, 32, 0, 534, 535, 5, 32, 0, 0, 535, 536, 3, 68, 34, 0, 536, 97, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 99, 1, 0, 0, 0, 539, 542, 3, 102, 51, 0, 540, 542, 3, 104, 52, 0, 541, 539, 1, 0, 0, 0, 541, 540, 1, 0, 0, 0, 542, 101, 1, 0, 0, 0, 543, 545, 7, 0, 0, 0, 544, 543, 1, 0, 0, 0, 544, 545, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 5, 28, 0, 0, 547, 103, 1, 0, 0, 0, 548, 550, 7, 0, 0, 0, 549, 548, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 5, 27, 0, 0, 552, 105, 1, 0, 0, 0, 553, 554, 5, 26, 0, 0, 554, 107, 1, 0, 0, 0, 555, 556, 7, 7, 0, 0, 556, 109, 1, 0, 0, 0, 557, 558, 5, 5, 0, 0, 558, 559, 3, 112, 56, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 65, 0, 0, 561, 562, 3, 2, 1, 0, 562, 563, 5, 66, 0, 0, 563, 113, 1, 0, 0, 0, 564, 565, 5, 13, 0, 0, 565, 566, 5, 100, 0, 0, 566, 115, 1, 0, 0, 0, 567, 568, 5, 3, 0, 0, 568, 571, 5, 90, 0, 0, 569, 570, 5, 88, 0, 0, 570, 572, 3, 60, 30, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 582, 1, 0, 0, 0, 573, 574, 5, 89, 0, 0, 574, 579, 3, 118, 59, 0, 575, 576, 5, 34, 0, 0, 576, 578, 3, 118, 59, 0, 577, 575, 1, 0, 0, 0, 578, 581, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 583, 1, 0, 0, 0, 581, 579, 1, 0, 0, 0, 582, 573, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 117, 1, 0, 0, 0, 584, 585, 3, 60, 30, 0, 585, 586, 5, 32, 0, 0, 586, 588, 1, 0, 0, 0, 587, 584, 1, 0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 3, 60, 30, 0, 590, 119, 1, 0, 0, 0, 591, 592, 5, 18, 0, 0, 592, 593, 3, 36, 18, 0, 593, 594, 5, 88, 0, 0, 594, 595, 3, 62, 31, 0, 595, 121, 1, 0, 0, 0, 596, 597, 5, 17, 0, 0, 597, 600, 3, 54, 27, 0, 598, 599, 5, 29, 0, 0, 599, 601, 3, 30, 15, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 123, 1, 0, 0, 0, 58, 135, 144, 162, 174, 183, 191, 197, 205, 207, 212, 219, 224, 235, 241, 249, 251, 262, 269, 280, 283, 299, 305, 315, 319, 324, 334, 342, 355, 359, 363, 370, 374, 381, 387, 394, 402, 410, 418, 435, 446, 457, 462, 466, 471, 482, 487, 491, 505, 516, 530, 541, 544, 549, 571, 579, 582, 587, 600] \ No newline at end of file +[4, 1, 128, 635, 2, 0, 7, 0, 
2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 344, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 350, 8, 22, 10, 22, 12, 22, 353, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 363, 8, 24, 10, 24, 12, 24, 366, 9, 24, 1, 24, 3, 24, 369, 8, 24, 1, 24, 1, 24, 3, 24, 373, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 380, 8, 26, 1, 26, 1, 26, 3, 26, 384, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 389, 8, 27, 10, 27, 12, 27, 392, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 397, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 402, 8, 29, 10, 29, 12, 29, 405, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 410, 8, 30, 10, 30, 12, 30, 413, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 418, 8, 31, 10, 31, 12, 31, 421, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 428, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 454, 8, 34, 10, 34, 12, 34, 457, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 465, 8, 34, 10, 34, 12, 34, 468, 9, 34, 1, 34, 1, 34, 3, 34, 472, 8, 34, 1, 35, 1, 35, 3, 35, 476, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 481, 8, 36, 1, 37, 1, 37, 
1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 490, 8, 38, 10, 38, 12, 38, 493, 9, 38, 1, 39, 1, 39, 3, 39, 497, 8, 39, 1, 39, 1, 39, 3, 39, 501, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 513, 8, 42, 10, 42, 12, 42, 516, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 526, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 538, 8, 47, 10, 47, 12, 47, 541, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 551, 8, 50, 1, 51, 3, 51, 554, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 559, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 581, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 587, 8, 58, 10, 58, 12, 58, 590, 9, 58, 3, 58, 592, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 597, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 610, 8, 61, 1, 62, 3, 62, 613, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 622, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 628, 8, 64, 10, 64, 12, 64, 631, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 660, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 343, 1, 0, 0, 0, 44, 345, 1, 0, 0, 0, 46, 354, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 377, 1, 0, 0, 0, 54, 385, 1, 0, 0, 0, 56, 393, 1, 0, 0, 0, 58, 398, 1, 0, 0, 0, 60, 406, 1, 0, 0, 0, 62, 414, 1, 0, 0, 0, 64, 422, 1, 0, 0, 0, 66, 427, 1, 0, 0, 0, 68, 471, 1, 0, 0, 0, 70, 475, 1, 0, 0, 0, 72, 480, 1, 0, 0, 0, 74, 482, 1, 0, 0, 0, 76, 485, 1, 0, 0, 0, 78, 494, 1, 0, 0, 0, 80, 502, 1, 0, 0, 0, 82, 505, 1, 0, 0, 0, 84, 508, 1, 0, 0, 0, 86, 517, 1, 0, 0, 0, 88, 521, 1, 0, 0, 0, 90, 527, 1, 0, 0, 0, 92, 531, 1, 0, 0, 0, 94, 534, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 98, 546, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 553, 1, 0, 0, 0, 104, 558, 1, 0, 0, 0, 106, 562, 1, 0, 0, 0, 108, 564, 1, 0, 0, 0, 110, 566, 1, 0, 0, 0, 112, 569, 1, 0, 0, 0, 114, 573, 1, 0, 0, 0, 116, 576, 1, 0, 0, 0, 118, 596, 1, 0, 0, 0, 120, 600, 1, 0, 0, 0, 122, 605, 1, 0, 0, 0, 124, 612, 1, 0, 0, 0, 126, 618, 1, 0, 0, 0, 128, 623, 1, 0, 0, 0, 130, 632, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 
147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 236, 3, 58, 29, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 68, 34, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 108, 54, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 68, 34, 0, 265, 272, 3, 58, 29, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 
0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 72, 36, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 64, 32, 0, 299, 27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 58, 29, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 344, 3, 44, 22, 0, 342, 344, 3, 46, 23, 0, 343, 341, 1, 0, 0, 0, 343, 342, 1, 0, 0, 0, 344, 43, 1, 0, 0, 0, 345, 346, 5, 80, 0, 0, 346, 351, 5, 81, 0, 0, 347, 348, 5, 39, 0, 0, 348, 350, 5, 81, 0, 0, 349, 347, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 45, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 354, 355, 5, 70, 0, 0, 355, 356, 3, 44, 22, 0, 356, 357, 5, 71, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 19, 0, 0, 359, 364, 3, 36, 18, 0, 360, 361, 5, 39, 0, 0, 361, 363, 3, 36, 18, 0, 362, 360, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 367, 369, 3, 54, 27, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 33, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 49, 1, 0, 0, 0, 374, 375, 5, 4, 0, 0, 375, 376, 3, 30, 15, 0, 376, 51, 1, 0, 0, 0, 377, 379, 5, 15, 0, 0, 378, 380, 3, 54, 27, 0, 379, 378, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 382, 5, 33, 0, 0, 382, 384, 3, 30, 15, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 53, 1, 0, 0, 0, 385, 390, 3, 56, 28, 0, 386, 387, 5, 39, 0, 0, 387, 389, 3, 56, 28, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 55, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 396, 3, 32, 16, 0, 394, 395, 5, 16, 0, 0, 395, 397, 3, 10, 5, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 57, 1, 0, 0, 0, 398, 403, 3, 72, 36, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 72, 36, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 59, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 66, 33, 0, 407, 408, 5, 41, 0, 0, 408, 410, 3, 66, 33, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 
411, 412, 1, 0, 0, 0, 412, 61, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 419, 3, 60, 30, 0, 415, 416, 5, 39, 0, 0, 416, 418, 3, 60, 30, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 7, 3, 0, 0, 423, 65, 1, 0, 0, 0, 424, 428, 5, 85, 0, 0, 425, 426, 4, 33, 10, 0, 426, 428, 3, 70, 35, 0, 427, 424, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 428, 67, 1, 0, 0, 0, 429, 472, 5, 50, 0, 0, 430, 431, 3, 104, 52, 0, 431, 432, 5, 72, 0, 0, 432, 472, 1, 0, 0, 0, 433, 472, 3, 102, 51, 0, 434, 472, 3, 104, 52, 0, 435, 472, 3, 98, 49, 0, 436, 472, 3, 70, 35, 0, 437, 472, 3, 106, 53, 0, 438, 439, 5, 70, 0, 0, 439, 444, 3, 100, 50, 0, 440, 441, 5, 39, 0, 0, 441, 443, 3, 100, 50, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 447, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 71, 0, 0, 448, 472, 1, 0, 0, 0, 449, 450, 5, 70, 0, 0, 450, 455, 3, 98, 49, 0, 451, 452, 5, 39, 0, 0, 452, 454, 3, 98, 49, 0, 453, 451, 1, 0, 0, 0, 454, 457, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 458, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 458, 459, 5, 71, 0, 0, 459, 472, 1, 0, 0, 0, 460, 461, 5, 70, 0, 0, 461, 466, 3, 106, 53, 0, 462, 463, 5, 39, 0, 0, 463, 465, 3, 106, 53, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 470, 5, 71, 0, 0, 470, 472, 1, 0, 0, 0, 471, 429, 1, 0, 0, 0, 471, 430, 1, 0, 0, 0, 471, 433, 1, 0, 0, 0, 471, 434, 1, 0, 0, 0, 471, 435, 1, 0, 0, 0, 471, 436, 1, 0, 0, 0, 471, 437, 1, 0, 0, 0, 471, 438, 1, 0, 0, 0, 471, 449, 1, 0, 0, 0, 471, 460, 1, 0, 0, 0, 472, 69, 1, 0, 0, 0, 473, 476, 5, 53, 0, 0, 474, 476, 5, 69, 0, 0, 475, 473, 1, 0, 0, 0, 475, 474, 1, 0, 0, 0, 476, 71, 1, 0, 0, 0, 477, 481, 3, 64, 32, 0, 478, 479, 4, 36, 11, 0, 479, 481, 3, 70, 35, 0, 480, 477, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 73, 1, 0, 0, 0, 482, 483, 5, 9, 0, 0, 483, 484, 5, 31, 0, 0, 484, 75, 1, 0, 0, 0, 485, 486, 5, 14, 0, 0, 486, 491, 3, 78, 39, 0, 487, 488, 5, 39, 0, 0, 488, 490, 3, 78, 39, 0, 489, 487, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 77, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 494, 496, 3, 10, 5, 0, 495, 497, 7, 4, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 500, 1, 0, 0, 0, 498, 499, 5, 51, 0, 0, 499, 501, 7, 5, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 79, 1, 0, 0, 0, 502, 503, 5, 8, 0, 0, 503, 504, 3, 62, 31, 0, 504, 81, 1, 0, 0, 0, 505, 506, 5, 2, 0, 0, 506, 507, 3, 62, 31, 0, 507, 83, 1, 0, 0, 0, 508, 509, 5, 11, 0, 0, 509, 514, 3, 86, 43, 0, 510, 511, 5, 39, 0, 0, 511, 513, 3, 86, 43, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 85, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 89, 0, 0, 519, 520, 3, 60, 30, 0, 520, 87, 1, 0, 0, 0, 521, 522, 5, 1, 0, 0, 522, 523, 3, 20, 10, 0, 523, 525, 3, 106, 53, 0, 524, 526, 3, 94, 47, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 89, 1, 0, 0, 0, 527, 528, 5, 7, 0, 0, 528, 529, 3, 20, 10, 0, 529, 530, 3, 106, 53, 0, 530, 91, 1, 0, 0, 0, 531, 532, 5, 10, 0, 0, 532, 533, 3, 58, 29, 0, 533, 93, 1, 0, 0, 0, 534, 539, 3, 96, 48, 0, 535, 536, 5, 39, 0, 0, 536, 538, 3, 96, 48, 0, 537, 535, 1, 0, 0, 0, 538, 541, 1, 0, 0, 0, 539, 537, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 95, 1, 0, 0, 0, 541, 539, 1, 0, 0, 0, 542, 543, 3, 64, 32, 0, 543, 544, 5, 36, 0, 0, 544, 545, 3, 68, 34, 0, 545, 97, 1, 0, 0, 0, 546, 547, 
7, 6, 0, 0, 547, 99, 1, 0, 0, 0, 548, 551, 3, 102, 51, 0, 549, 551, 3, 104, 52, 0, 550, 548, 1, 0, 0, 0, 550, 549, 1, 0, 0, 0, 551, 101, 1, 0, 0, 0, 552, 554, 7, 0, 0, 0, 553, 552, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 5, 32, 0, 0, 556, 103, 1, 0, 0, 0, 557, 559, 7, 0, 0, 0, 558, 557, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 5, 31, 0, 0, 561, 105, 1, 0, 0, 0, 562, 563, 5, 30, 0, 0, 563, 107, 1, 0, 0, 0, 564, 565, 7, 7, 0, 0, 565, 109, 1, 0, 0, 0, 566, 567, 5, 5, 0, 0, 567, 568, 3, 112, 56, 0, 568, 111, 1, 0, 0, 0, 569, 570, 5, 70, 0, 0, 570, 571, 3, 2, 1, 0, 571, 572, 5, 71, 0, 0, 572, 113, 1, 0, 0, 0, 573, 574, 5, 13, 0, 0, 574, 575, 5, 105, 0, 0, 575, 115, 1, 0, 0, 0, 576, 577, 5, 3, 0, 0, 577, 580, 5, 95, 0, 0, 578, 579, 5, 93, 0, 0, 579, 581, 3, 60, 30, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 591, 1, 0, 0, 0, 582, 583, 5, 94, 0, 0, 583, 588, 3, 118, 59, 0, 584, 585, 5, 39, 0, 0, 585, 587, 3, 118, 59, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 592, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 582, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 117, 1, 0, 0, 0, 593, 594, 3, 60, 30, 0, 594, 595, 5, 36, 0, 0, 595, 597, 1, 0, 0, 0, 596, 593, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 3, 60, 30, 0, 599, 119, 1, 0, 0, 0, 600, 601, 5, 18, 0, 0, 601, 602, 3, 36, 18, 0, 602, 603, 5, 93, 0, 0, 603, 604, 3, 62, 31, 0, 604, 121, 1, 0, 0, 0, 605, 606, 5, 17, 0, 0, 606, 609, 3, 54, 27, 0, 607, 608, 5, 33, 0, 0, 608, 610, 3, 30, 15, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 123, 1, 0, 0, 0, 611, 613, 7, 8, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 5, 20, 0, 0, 615, 616, 3, 126, 63, 0, 616, 617, 3, 128, 64, 0, 617, 125, 1, 0, 0, 0, 618, 621, 3, 64, 32, 0, 619, 620, 5, 89, 0, 0, 620, 622, 3, 64, 32, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 127, 1, 0, 0, 0, 623, 624, 5, 93, 0, 0, 624, 629, 3, 130, 65, 0, 625, 626, 5, 39, 0, 0, 626, 628, 3, 130, 65, 0, 627, 625, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 129, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 632, 633, 3, 16, 8, 0, 633, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 343, 351, 364, 368, 372, 379, 383, 390, 396, 403, 411, 419, 427, 444, 455, 466, 471, 475, 480, 491, 496, 500, 514, 525, 539, 550, 553, 558, 580, 588, 591, 596, 609, 612, 621, 629] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e36184b1f07da..e864eaff3edd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -8,26 +8,14 @@ * 2.0. 
 */

-import org.antlr.v4.runtime.FailedPredicateException;
-import org.antlr.v4.runtime.NoViableAltException;
-import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.RecognitionException;
-import org.antlr.v4.runtime.RuleContext;
-import org.antlr.v4.runtime.RuntimeMetaData;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.Vocabulary;
-import org.antlr.v4.runtime.VocabularyImpl;
-import org.antlr.v4.runtime.atn.ATN;
-import org.antlr.v4.runtime.atn.ATNDeserializer;
-import org.antlr.v4.runtime.atn.ParserATNSimulator;
-import org.antlr.v4.runtime.atn.PredictionContextCache;
+import org.antlr.v4.runtime.atn.*;
 import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.tree.ParseTreeListener;
-import org.antlr.v4.runtime.tree.ParseTreeVisitor;
-import org.antlr.v4.runtime.tree.TerminalNode;
-
+
+import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.misc.*;
+import org.antlr.v4.runtime.tree.*;
 import java.util.List;
+import java.util.Iterator;
+import java.util.ArrayList;

 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue"})
 public class EsqlBaseParser extends ParserConfig {
@@ -37,113 +25,120 @@ public class EsqlBaseParser extends ParserConfig {
   protected static final PredictionContextCache _sharedContextCache =
     new PredictionContextCache();
   public static final int
-    DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8,
-    LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15,
-    WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, UNKNOWN_CMD=20,
-    LINE_COMMENT=21, MULTILINE_COMMENT=22, WS=23, COLON=24, PIPE=25, QUOTED_STRING=26,
-    INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, ASC=31, ASSIGN=32,
-    CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, IN=39, IS=40,
-    LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, RLIKE=49,
-    RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, GTE=58,
-    PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64,
-    OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68,
-    EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72,
-    EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76,
-    FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80,
-    PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83,
-    AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87,
-    ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92,
-    ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95,
-    ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98,
-    MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102,
-    SHOW_WS=103, SETTING=104, SETTING_LINE_COMMENT=105, SETTTING_MULTILINE_COMMENT=106,
-    SETTING_WS=107, LOOKUP_LINE_COMMENT=108, LOOKUP_MULTILINE_COMMENT=109,
-    LOOKUP_WS=110, LOOKUP_FIELD_LINE_COMMENT=111, LOOKUP_FIELD_MULTILINE_COMMENT=112,
-    LOOKUP_FIELD_WS=113, METRICS_LINE_COMMENT=114, METRICS_MULTILINE_COMMENT=115,
-    METRICS_WS=116, CLOSING_METRICS_LINE_COMMENT=117, CLOSING_METRICS_MULTILINE_COMMENT=118,
-    CLOSING_METRICS_WS=119;
+    DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8,
+    LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15,
+    WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20,
+    DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24,
+    UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29,
+    QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34,
+    ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42,
+    FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51,
+    OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60,
+    LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68,
+    NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71,
+    UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75,
+    EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79,
+    METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83,
+    FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87,
+    PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91,
+    RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96,
+    ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99,
+    ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102,
+    MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106,
+    SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110,
+    SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113,
+    LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116,
+    LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120,
+    JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124,
+    METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127,
+    CLOSING_METRICS_WS=128;
   public static final int
-    RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3,
-    RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6,
-    RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9,
-    RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12,
-    RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16,
-    RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19,
-    RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23,
-    RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26,
-    RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30,
-    RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33,
-    RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36,
-    RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39,
-    RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42,
-    RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45,
-    RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48,
-    RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51,
-    RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54,
-    RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57,
-    RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60,
-    RULE_inlinestatsCommand = 61;
+    RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3,
+    RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6,
+    RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9,
+    RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12,
+    RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16,
+    RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19,
+    RULE_indexString = 20, RULE_metadata = 21, RULE_metadataOption = 22, RULE_deprecated_metadata = 23,
+    RULE_metricsCommand = 24, RULE_evalCommand = 25, RULE_statsCommand = 26,
+    RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30,
+    RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33,
+    RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36,
+    RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39,
+    RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42,
+    RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45,
+    RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48,
+    RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51,
+    RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54,
+    RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57,
+    RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60,
+    RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63,
+    RULE_joinCondition = 64, RULE_joinPredicate = 65;

   private static String[] makeRuleNames() {
     return new String[] {
-      "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand",
-      "booleanExpression", "regexBooleanExpression", "matchBooleanExpression",
-      "valueExpression", "operatorExpression", "primaryExpression", "functionExpression",
-      "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand",
-      "indexPattern", "clusterString", "indexString", "metadata", "metadataOption",
-      "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand",
-      "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns",
-      "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter",
-      "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand",
-      "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand",
-      "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue",
-      "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression",
-      "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand",
-      "inlinestatsCommand"
+      "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand",
+      "booleanExpression", "regexBooleanExpression", "matchBooleanExpression",
+      "valueExpression", "operatorExpression", "primaryExpression", "functionExpression",
+      "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand",
+      "indexPattern", "clusterString", "indexString", "metadata", "metadataOption",
+      "deprecated_metadata", "metricsCommand", "evalCommand", "statsCommand",
+      "aggFields", "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns",
+      "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter",
+      "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand",
+      "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand",
+      "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue",
+      "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression",
+      "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand",
+      "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate"
     };
   }
   public static final String[] ruleNames = makeRuleNames();

   private static String[] makeLiteralNames() {
     return new String[] {
-      null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
-      "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'",
-      "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null,
-      "':'", "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'",
-      "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'",
-      "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'",
-      "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='",
-      "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null,
-      null, null, null, null, null, "'metadata'", null, null, null, null, null,
-      null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null,
-      null, null, null, null, null, null, null, null, "'info'"
+      null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
+      "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'",
+      "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null,
+      null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'",
+      "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'",
+      "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'",
+      "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'",
+      "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null,
+      "']'", null, null, null, null, null, null, null, null, "'metadata'",
+      null, null, null, null, null, null, null, null, "'as'", null, null, null,
+      "'on'", "'with'", null, null, null, null, null, null, null, null, null,
+      null, "'info'", null, null, null, null, null, null, null, null, null,
+      null, null, null, null, "'USING'"
     };
   }
   private static final String[] _LITERAL_NAMES = makeLiteralNames();

   private static String[] makeSymbolicNames() {
     return new String[] {
-      null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
-      "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS",
-      "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "UNKNOWN_CMD",
-      "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "QUOTED_STRING",
-      "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP",
-      "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE",
-      "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ",
-      "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK",
-      "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET",
-      "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT",
-      "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT",
-      "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT",
-      "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
-      "PROJECT_WS", "AS",
"RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", - "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", + "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } @@ -231,9 +226,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(124); + setState(132); query(0); - setState(125); + setState(133); match(EOF); } } @@ -255,7 +250,7 @@ public QueryContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_query; } - + @SuppressWarnings("this-escape") public QueryContext() { } public void copyFrom(QueryContext ctx) { @@ -329,11 +324,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(128); + setState(136); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(135); + setState(143); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -344,16 +339,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(130); + setState(138); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(131); + setState(139); match(PIPE); - setState(132); + setState(140); processingCommand(); } - } + } } - setState(137); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -411,43 +406,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(144); + setState(152); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(138); + setState(146); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(139); + setState(147); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(140); + setState(148); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(141); + setState(149); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(142); + setState(150); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(143); + setState(151); metricsCommand(); } break; @@ -508,6 +503,9 @@ public InlinestatsCommandContext inlinestatsCommand() { public LookupCommandContext lookupCommand() { return getRuleContext(LookupCommandContext.class,0); } + public JoinCommandContext joinCommand() { + return getRuleContext(JoinCommandContext.class,0); + } @SuppressWarnings("this-escape") public ProcessingCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -532,111 +530,120 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(162); + setState(172); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(146); + setState(154); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(147); + setState(155); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(148); + setState(156); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(149); + setState(157); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(150); + setState(158); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(151); + setState(159); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(152); + setState(160); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(153); + setState(161); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(154); + setState(162); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(155); + setState(163); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(156); + setState(164); enrichCommand(); } break; case 12: 
enterOuterAlt(_localctx, 12); { - setState(157); + setState(165); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(158); + setState(166); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(159); + setState(167); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(160); + setState(168); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(161); + setState(169); lookupCommand(); } break; + case 15: + enterOuterAlt(_localctx, 15); + { + setState(170); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(171); + joinCommand(); + } + break; } } catch (RecognitionException re) { @@ -682,9 +689,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(164); + setState(174); match(WHERE); - setState(165); + setState(175); booleanExpression(0); } } @@ -706,7 +713,7 @@ public BooleanExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_booleanExpression; } - + @SuppressWarnings("this-escape") public BooleanExpressionContext() { } public void copyFrom(BooleanExpressionContext ctx) { @@ -900,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(197); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -909,9 +916,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(178); match(NOT); - setState(169); + setState(179); booleanExpression(8); } break; @@ -920,7 +927,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(170); + setState(180); valueExpression(); } break; @@ -929,7 +936,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(171); + setState(181); regexBooleanExpression(); } break; @@ -938,41 +945,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(172); + setState(182); valueExpression(); - setState(174); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(173); + setState(183); match(NOT); } } - setState(176); + setState(186); match(IN); - setState(177); + setState(187); match(LP); - setState(178); + setState(188); valueExpression(); - setState(183); + setState(193); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(179); + setState(189); match(COMMA); - setState(180); + setState(190); valueExpression(); } } - setState(185); + setState(195); _errHandler.sync(this); _la = _input.LA(1); } - setState(186); + setState(196); match(RP); } break; @@ -981,21 +988,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(188); + setState(198); valueExpression(); - setState(189); + setState(199); 
match(IS); - setState(191); + setState(201); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(190); + setState(200); match(NOT); } } - setState(193); + setState(203); match(NULL); } break; @@ -1004,15 +1011,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(195); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(196); + setState(205); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(207); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1020,7 +1025,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(205); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1028,11 +1033,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(199); + setState(208); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(200); + setState(209); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(201); + setState(210); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1041,18 +1046,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(202); + setState(211); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(203); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(204); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } - } + } } - setState(209); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1107,48 +1112,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(224); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(210); + setState(219); valueExpression(); - setState(212); + setState(221); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(211); + setState(220); match(NOT); } } - setState(214); + setState(223); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(215); + setState(224); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(217); + setState(226); valueExpression(); - setState(219); + setState(228); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(218); + setState(227); match(NOT); } } - setState(221); + setState(230); 
((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(222); + setState(231); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1202,11 +1207,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(226); + setState(235); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(227); + setState(236); match(COLON); - setState(228); + setState(237); ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } @@ -1228,7 +1233,7 @@ public ValueExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_valueExpression; } - + @SuppressWarnings("this-escape") public ValueExpressionContext() { } public void copyFrom(ValueExpressionContext ctx) { @@ -1290,14 +1295,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(235); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(230); + setState(239); operatorExpression(0); } break; @@ -1305,11 +1310,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(231); + setState(240); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(232); + setState(241); comparisonOperator(); - setState(233); + setState(242); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1333,7 +1338,7 @@ public OperatorExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_operatorExpression; } - + @SuppressWarnings("this-escape") public OperatorExpressionContext() { } public void copyFrom(OperatorExpressionContext ctx) { @@ -1434,7 +1439,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(241); + setState(250); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1443,7 +1448,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(238); + setState(247); primaryExpression(0); } break; @@ -1452,7 +1457,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(239); + setState(248); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1463,13 +1468,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(240); + setState(249); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(251); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1477,7 +1482,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws 
RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(249); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1485,12 +1490,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(243); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(244); + setState(253); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { + if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1498,7 +1503,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(245); + setState(254); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1507,9 +1512,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(246); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(247); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1520,14 +1525,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(248); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } - } + } } - setState(253); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1551,7 +1556,7 @@ public PrimaryExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_primaryExpression; } - + @SuppressWarnings("this-escape") public PrimaryExpressionContext() { } public void copyFrom(PrimaryExpressionContext ctx) { @@ -1685,7 +1690,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(262); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1694,7 +1699,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(255); + setState(264); constant(); } break; @@ -1703,7 +1708,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(256); + setState(265); qualifiedName(); } break; @@ -1712,7 +1717,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(257); + 
setState(266); functionExpression(); } break; @@ -1721,17 +1726,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(258); + setState(267); match(LP); - setState(259); + setState(268); booleanExpression(0); - setState(260); + setState(269); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(269); + setState(278); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1742,16 +1747,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(264); + setState(273); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(265); + setState(274); match(CAST_OP); - setState(266); + setState(275); dataType(); } - } + } } - setState(271); + setState(280); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1813,37 +1818,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(272); + setState(281); functionName(); - setState(273); + setState(282); match(LP); - setState(283); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(274); + setState(283); match(ASTERISK); } break; case 2: { { - setState(275); + setState(284); booleanExpression(0); - setState(280); + setState(289); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(276); + setState(285); match(COMMA); - setState(277); + setState(286); booleanExpression(0); } } - setState(282); + setState(291); _errHandler.sync(this); _la = _input.LA(1); } @@ -1851,7 +1856,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(285); + setState(294); match(RP); } } @@ -1897,7 +1902,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(287); + setState(296); identifierOrParameter(); } } @@ -1919,7 +1924,7 @@ public DataTypeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_dataType; } - + @SuppressWarnings("this-escape") public DataTypeContext() { } public void copyFrom(DataTypeContext ctx) { @@ -1955,7 +1960,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(289); + setState(298); identifier(); } } @@ -2002,9 +2007,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(291); + setState(300); match(ROW); - setState(292); + setState(301); fields(); } } @@ -2058,23 +2063,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(294); + setState(303); field(); - setState(299); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(295); + 
setState(304); match(COMMA); - setState(296); + setState(305); field(); } - } + } } - setState(301); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2126,19 +2131,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(305); + setState(314); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(302); + setState(311); qualifiedName(); - setState(303); + setState(312); match(ASSIGN); } break; } - setState(307); + setState(316); booleanExpression(0); } } @@ -2196,34 +2201,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(309); + setState(318); match(FROM); - setState(310); + setState(319); indexPattern(); - setState(315); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(311); + setState(320); match(COMMA); - setState(312); + setState(321); indexPattern(); } - } + } } - setState(317); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(319); + setState(328); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(318); + setState(327); metadata(); } break; @@ -2276,19 +2281,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(333); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(321); + setState(330); clusterString(); - setState(322); + setState(331); match(COLON); } break; } - setState(326); + setState(335); indexString(); } } @@ -2332,7 +2337,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(337); match(UNQUOTED_SOURCE); } } @@ -2378,7 +2383,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(330); + setState(339); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2433,20 +2438,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(334); + setState(343); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(332); + setState(341); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(333); + setState(342); deprecated_metadata(); } break; @@ -2503,25 +2508,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(336); + setState(345); match(METADATA); - setState(337); + setState(346); match(UNQUOTED_SOURCE); - setState(342); + setState(351); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(338); + setState(347); match(COMMA); - setState(339); + setState(348); match(UNQUOTED_SOURCE); } - } + } } - setState(344); + setState(353); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2570,11 +2575,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(345); + setState(354); match(OPENING_BRACKET); - setState(346); + setState(355); metadataOption(); - setState(347); + setState(356); match(CLOSING_BRACKET); } } @@ -2638,46 +2643,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(349); + setState(358); match(DEV_METRICS); - setState(350); + setState(359); indexPattern(); - setState(355); + setState(364); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(351); + setState(360); match(COMMA); - setState(352); + setState(361); indexPattern(); } - } + } } - setState(357); + setState(366); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(359); + setState(368); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(358); + setState(367); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(363); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(361); + setState(370); match(BY); - setState(362); + setState(371); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2727,9 +2732,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(365); + setState(374); match(EVAL); - setState(366); + setState(375); fields(); } } @@ -2782,26 +2787,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(368); + setState(377); match(STATS); - setState(370); + setState(379); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(369); + setState(378); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(374); + setState(383); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(372); + setState(381); match(BY); - setState(373); + setState(382); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2858,23 +2863,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(376); + setState(385); aggField(); - setState(381); + setState(390); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(377); + setState(386); match(COMMA); - setState(378); + setState(387); aggField(); } - } + } } - setState(383); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2926,16 +2931,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(384); + setState(393); field(); - setState(387); + setState(396); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(385); + setState(394); match(WHERE); - setState(386); + setState(395); booleanExpression(0); } break; @@ 
-2992,23 +2997,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(389); + setState(398); identifierOrParameter(); - setState(394); + setState(403); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(390); + setState(399); match(DOT); - setState(391); + setState(400); identifierOrParameter(); } - } + } } - setState(396); + setState(405); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3064,23 +3069,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - setState(397); + setState(406); identifierPattern(); - setState(402); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(398); + setState(407); match(DOT); - setState(399); + setState(408); identifierPattern(); } - } + } } - setState(404); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3136,23 +3141,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(405); + setState(414); qualifiedNamePattern(); - setState(410); + setState(419); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(406); + setState(415); match(COMMA); - setState(407); + setState(416); qualifiedNamePattern(); } - } + } } - setState(412); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3200,7 +3205,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(413); + setState(422); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3253,22 +3258,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(418); + setState(427); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(415); + setState(424); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(416); + setState(425); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(417); + setState(426); parameter(); } break; @@ -3292,7 +3297,7 @@ public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_constant; } - + @SuppressWarnings("this-escape") public ConstantContext() { } public void copyFrom(ConstantContext ctx) { @@ -3541,14 +3546,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(462); + setState(471); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); 
enterOuterAlt(_localctx, 1); { - setState(420); + setState(429); match(NULL); } break; @@ -3556,9 +3561,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(421); + setState(430); integerValue(); - setState(422); + setState(431); match(UNQUOTED_IDENTIFIER); } break; @@ -3566,7 +3571,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(424); + setState(433); decimalValue(); } break; @@ -3574,7 +3579,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(425); + setState(434); integerValue(); } break; @@ -3582,7 +3587,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(426); + setState(435); booleanValue(); } break; @@ -3590,7 +3595,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(427); + setState(436); parameter(); } break; @@ -3598,7 +3603,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(428); + setState(437); string(); } break; @@ -3606,27 +3611,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(429); + setState(438); match(OPENING_BRACKET); - setState(430); + setState(439); numericValue(); - setState(435); + setState(444); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(431); + setState(440); match(COMMA); - setState(432); + setState(441); numericValue(); } } - setState(437); + setState(446); _errHandler.sync(this); _la = _input.LA(1); } - setState(438); + setState(447); match(CLOSING_BRACKET); } break; @@ -3634,27 +3639,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(440); + setState(449); match(OPENING_BRACKET); - setState(441); + setState(450); booleanValue(); - setState(446); + setState(455); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(442); + setState(451); match(COMMA); - setState(443); + setState(452); booleanValue(); } } - setState(448); + setState(457); _errHandler.sync(this); _la = _input.LA(1); } - setState(449); + setState(458); match(CLOSING_BRACKET); } break; @@ -3662,27 +3667,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(451); + setState(460); match(OPENING_BRACKET); - setState(452); + setState(461); string(); - setState(457); + setState(466); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(453); + setState(462); match(COMMA); - setState(454); + setState(463); string(); } } - setState(459); + setState(468); _errHandler.sync(this); _la = _input.LA(1); } - setState(460); + setState(469); match(CLOSING_BRACKET); } break; @@ -3706,7 +3711,7 @@ public ParameterContext(ParserRuleContext parent, int invokingState) { super(parent, 
invokingState); } @Override public int getRuleIndex() { return RULE_parameter; } - + @SuppressWarnings("this-escape") public ParameterContext() { } public void copyFrom(ParameterContext ctx) { @@ -3756,14 +3761,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(466); + setState(475); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(464); + setState(473); match(PARAM); } break; @@ -3771,7 +3776,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(465); + setState(474); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3822,22 +3827,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(471); + setState(480); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(468); + setState(477); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(469); + setState(478); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(470); + setState(479); parameter(); } break; @@ -3884,9 +3889,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(482); match(LIMIT); - setState(474); + setState(483); match(INTEGER_LITERAL); } } @@ -3941,25 +3946,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(476); + setState(485); match(SORT); - setState(477); + setState(486); orderExpression(); - setState(482); + setState(491); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(478); + setState(487); match(COMMA); - setState(479); + setState(488); orderExpression(); } - } + } } - setState(484); + setState(493); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4015,14 +4020,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(494); booleanExpression(0); - setState(487); + setState(496); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(486); + setState(495); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4036,14 +4041,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(491); + setState(500); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(489); + setState(498); match(NULLS); - setState(490); + setState(499); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4102,9 +4107,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try 
{ enterOuterAlt(_localctx, 1); { - setState(493); + setState(502); match(KEEP); - setState(494); + setState(503); qualifiedNamePatterns(); } } @@ -4151,9 +4156,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(496); + setState(505); match(DROP); - setState(497); + setState(506); qualifiedNamePatterns(); } } @@ -4208,25 +4213,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(499); + setState(508); match(RENAME); - setState(500); + setState(509); renameClause(); - setState(505); + setState(514); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(501); + setState(510); match(COMMA); - setState(502); + setState(511); renameClause(); } - } + } } - setState(507); + setState(516); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -4280,11 +4285,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(517); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(509); + setState(518); match(AS); - setState(510); + setState(519); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4337,18 +4342,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(512); + setState(521); match(DISSECT); - setState(513); + setState(522); primaryExpression(0); - setState(514); + setState(523); string(); - setState(516); + setState(525); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(515); + setState(524); commandOptions(); } break; @@ -4401,11 +4406,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(527); match(GROK); - setState(519); + setState(528); primaryExpression(0); - setState(520); + setState(529); string(); } } @@ -4452,9 +4457,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(531); match(MV_EXPAND); - setState(523); + setState(532); qualifiedName(); } } @@ -4508,23 +4513,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(525); + setState(534); commandOption(); - setState(530); + setState(539); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(535); match(COMMA); - setState(527); + setState(536); commandOption(); } - } + } } - setState(532); + setState(541); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4576,11 +4581,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(533); + setState(542); identifier(); - setState(534); + setState(543); match(ASSIGN); - setState(535); + setState(544); constant(); } } @@ -4626,7 +4631,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - 
setState(537); + setState(546); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4681,20 +4686,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(541); + setState(550); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(539); + setState(548); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(540); + setState(549); integerValue(); } break; @@ -4743,12 +4748,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(544); + setState(553); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(543); + setState(552); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4761,7 +4766,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(546); + setState(555); match(DECIMAL_LITERAL); } } @@ -4808,12 +4813,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(549); + setState(558); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(548); + setState(557); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4826,7 +4831,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(551); + setState(560); match(INTEGER_LITERAL); } } @@ -4870,7 +4875,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(553); + setState(562); match(QUOTED_STRING); } } @@ -4920,9 +4925,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(555); + setState(564); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4975,9 +4980,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(557); + setState(566); match(EXPLAIN); - setState(558); + setState(567); subqueryExpression(); } } @@ -5025,11 +5030,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(560); + setState(569); match(OPENING_BRACKET); - setState(561); + setState(570); query(0); - setState(562); + setState(571); match(CLOSING_BRACKET); } } @@ -5051,7 +5056,7 @@ public ShowCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @Override public int getRuleIndex() { return RULE_showCommand; } - + @SuppressWarnings("this-escape") public ShowCommandContext() { } public void copyFrom(ShowCommandContext ctx) { @@ -5086,9 +5091,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(564); + setState(573); match(SHOW); - setState(565); + setState(574); match(INFO); } } @@ -5151,46 +5156,46 @@ public final EnrichCommandContext enrichCommand() throws 
RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(567); + setState(576); match(ENRICH); - setState(568); + setState(577); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(571); + setState(580); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(569); + setState(578); match(ON); - setState(570); + setState(579); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(582); + setState(591); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(573); + setState(582); match(WITH); - setState(574); + setState(583); enrichWithClause(); - setState(579); + setState(588); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(575); + setState(584); match(COMMA); - setState(576); + setState(585); enrichWithClause(); } - } + } } - setState(581); + setState(590); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -5247,19 +5252,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(587); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(584); + setState(593); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(585); + setState(594); match(ASSIGN); } break; } - setState(589); + setState(598); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5312,13 +5317,13 @@ public final LookupCommandContext lookupCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(591); + setState(600); match(DEV_LOOKUP); - setState(592); + setState(601); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(593); + setState(602); match(ON); - setState(594); + setState(603); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5371,18 +5376,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(596); + setState(605); match(DEV_INLINESTATS); - setState(597); + setState(606); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(600); + setState(609); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(598); + setState(607); match(BY); - setState(599); + setState(608); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5400,6 +5405,270 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class JoinCommandContext extends ParserRuleContext { + public Token type; + public TerminalNode DEV_JOIN() { return getToken(EsqlBaseParser.DEV_JOIN, 0); } + public JoinTargetContext joinTarget() { + return getRuleContext(JoinTargetContext.class,0); + } + public JoinConditionContext joinCondition() { + return getRuleContext(JoinConditionContext.class,0); + } + public TerminalNode DEV_JOIN_LOOKUP() { return getToken(EsqlBaseParser.DEV_JOIN_LOOKUP, 0); } + public TerminalNode DEV_JOIN_LEFT() { return getToken(EsqlBaseParser.DEV_JOIN_LEFT, 0); } + public TerminalNode DEV_JOIN_RIGHT() { return 
getToken(EsqlBaseParser.DEV_JOIN_RIGHT, 0); } + @SuppressWarnings("this-escape") + public JoinCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinCommand(this); + else return visitor.visitChildren(this); + } + } + + public final JoinCommandContext joinCommand() throws RecognitionException { + JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState()); + enterRule(_localctx, 124, RULE_joinCommand); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(612); + _errHandler.sync(this); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { + { + setState(611); + ((JoinCommandContext)_localctx).type = _input.LT(1); + _la = _input.LA(1); + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { + ((JoinCommandContext)_localctx).type = (Token)_errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + + setState(614); + match(DEV_JOIN); + setState(615); + joinTarget(); + setState(616); + joinCondition(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinTargetContext extends ParserRuleContext { + public IdentifierContext index; + public IdentifierContext alias; + public List identifier() { + return getRuleContexts(IdentifierContext.class); + } + public IdentifierContext identifier(int i) { + return getRuleContext(IdentifierContext.class,i); + } + public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } + @SuppressWarnings("this-escape") + public JoinTargetContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinTarget; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinTarget(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinTarget(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinTarget(this); + else return visitor.visitChildren(this); + } + } + + public final JoinTargetContext joinTarget() throws RecognitionException { + JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); + enterRule(_localctx, 126, RULE_joinTarget); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(618); + ((JoinTargetContext)_localctx).index = identifier(); + setState(621); + _errHandler.sync(this); + _la = _input.LA(1); + if (_la==AS) { + { + 
setState(619); + match(AS); + setState(620); + ((JoinTargetContext)_localctx).alias = identifier(); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinConditionContext extends ParserRuleContext { + public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } + public List joinPredicate() { + return getRuleContexts(JoinPredicateContext.class); + } + public JoinPredicateContext joinPredicate(int i) { + return getRuleContext(JoinPredicateContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + @SuppressWarnings("this-escape") + public JoinConditionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinCondition; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinCondition(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinCondition(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitJoinCondition(this); + else return visitor.visitChildren(this); + } + } + + public final JoinConditionContext joinCondition() throws RecognitionException { + JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState()); + enterRule(_localctx, 128, RULE_joinCondition); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(623); + match(ON); + setState(624); + joinPredicate(); + setState(629); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(625); + match(COMMA); + setState(626); + joinPredicate(); + } + } + } + setState(631); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class JoinPredicateContext extends ParserRuleContext { + public ValueExpressionContext valueExpression() { + return getRuleContext(ValueExpressionContext.class,0); + } + @SuppressWarnings("this-escape") + public JoinPredicateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_joinPredicate; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterJoinPredicate(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitJoinPredicate(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return 
((EsqlBaseParserVisitor)visitor).visitJoinPredicate(this); + else return visitor.visitChildren(this); + } + } + + public final JoinPredicateContext joinPredicate() throws RecognitionException { + JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState()); + enterRule(_localctx, 130, RULE_joinPredicate); + try { + enterOuterAlt(_localctx, 1); + { + setState(632); + valueExpression(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { case 1: @@ -5441,13 +5710,13 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in return this.isDevVersion(); case 3: return this.isDevVersion(); + case 4: + return this.isDevVersion(); } return true; } private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 4: - return this.isDevVersion(); case 5: return precpred(_ctx, 5); case 6: @@ -5487,387 +5756,406 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca } public static final String _serializedATN = - "\u0004\u0001w\u025b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ - "\u0086\b\u0001\n\u0001\f\u0001\u0089\t\u0001\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0091\b\u0002\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a3\b\u0003\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00b6"+ - "\b\u0005\n\u0005\f\u0005\u00b9\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0003\u0005\u00c0\b\u0005\u0001\u0005\u0001\u0005"+ - 
"\u0001\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00ce\b\u0005"+ - "\n\u0005\f\u0005\u00d1\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00d5"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ - "\u0006\u00dc\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ - "\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0003\b\u00ec\b\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0003\t\u00f2\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00fa\b\t\n\t\f\t\u00fd\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0003\n\u0107\b\n\u0001\n\u0001\n\u0001\n\u0005"+ - "\n\u010c\b\n\n\n\f\n\u010f\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0117\b\u000b\n\u000b\f\u000b"+ - "\u011a\t\u000b\u0003\u000b\u011c\b\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0005\u000f\u012a\b\u000f\n\u000f\f\u000f\u012d"+ - "\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u0132\b\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0005\u0011\u013a\b\u0011\n\u0011\f\u0011\u013d\t\u0011\u0001\u0011\u0003"+ - "\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u0145"+ - "\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u014f\b\u0015\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0155\b\u0016\n\u0016\f\u0016"+ - "\u0158\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u0162\b\u0018\n\u0018"+ - "\f\u0018\u0165\t\u0018\u0001\u0018\u0003\u0018\u0168\b\u0018\u0001\u0018"+ - "\u0001\u0018\u0003\u0018\u016c\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0003\u001a\u0173\b\u001a\u0001\u001a\u0001\u001a"+ - "\u0003\u001a\u0177\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ - "\u017c\b\u001b\n\u001b\f\u001b\u017f\t\u001b\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0003\u001c\u0184\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005"+ - "\u001d\u0189\b\u001d\n\u001d\f\u001d\u018c\t\u001d\u0001\u001e\u0001\u001e"+ - "\u0001\u001e\u0005\u001e\u0191\b\u001e\n\u001e\f\u001e\u0194\t\u001e\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0199\b\u001f\n\u001f\f\u001f"+ - "\u019c\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01a3\b!\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01b2\b\"\n\"\f\"\u01b5\t\"\u0001\""+ - "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bd\b\"\n\"\f\"\u01c0"+ - "\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c8\b\""+ - "\n\"\f\"\u01cb\t\"\u0001\"\u0001\"\u0003\"\u01cf\b\"\u0001#\u0001#\u0003"+ - "#\u01d3\b#\u0001$\u0001$\u0001$\u0003$\u01d8\b$\u0001%\u0001%\u0001%\u0001"+ - "&\u0001&\u0001&\u0001&\u0005&\u01e1\b&\n&\f&\u01e4\t&\u0001\'\u0001\'"+ - "\u0003\'\u01e8\b\'\u0001\'\u0001\'\u0003\'\u01ec\b\'\u0001(\u0001(\u0001"+ - "(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005*\u01f8\b*\n*"+ - "\f*\u01fb\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0003"+ - ",\u0205\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ - 
"/\u0001/\u0005/\u0211\b/\n/\f/\u0214\t/\u00010\u00010\u00010\u00010\u0001"+ - "1\u00011\u00012\u00012\u00032\u021e\b2\u00013\u00033\u0221\b3\u00013\u0001"+ - "3\u00014\u00034\u0226\b4\u00014\u00014\u00015\u00015\u00016\u00016\u0001"+ - "7\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001"+ - ":\u0001:\u0001:\u0001:\u0003:\u023c\b:\u0001:\u0001:\u0001:\u0001:\u0005"+ - ":\u0242\b:\n:\f:\u0245\t:\u0003:\u0247\b:\u0001;\u0001;\u0001;\u0003;"+ - "\u024c\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001=\u0003=\u0259\b=\u0001=\u0000\u0004\u0002\n\u0012\u0014"+ - ">\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a"+ - "\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz\u0000\b\u0001"+ - "\u0000;<\u0001\u0000=?\u0002\u0000\u001a\u001aLL\u0001\u0000CD\u0002\u0000"+ - "\u001f\u001f##\u0002\u0000&&))\u0002\u0000%%33\u0002\u0000446:\u0274\u0000"+ - "|\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0004\u0090"+ - "\u0001\u0000\u0000\u0000\u0006\u00a2\u0001\u0000\u0000\u0000\b\u00a4\u0001"+ - "\u0000\u0000\u0000\n\u00c5\u0001\u0000\u0000\u0000\f\u00e0\u0001\u0000"+ - "\u0000\u0000\u000e\u00e2\u0001\u0000\u0000\u0000\u0010\u00eb\u0001\u0000"+ - "\u0000\u0000\u0012\u00f1\u0001\u0000\u0000\u0000\u0014\u0106\u0001\u0000"+ - "\u0000\u0000\u0016\u0110\u0001\u0000\u0000\u0000\u0018\u011f\u0001\u0000"+ - "\u0000\u0000\u001a\u0121\u0001\u0000\u0000\u0000\u001c\u0123\u0001\u0000"+ - "\u0000\u0000\u001e\u0126\u0001\u0000\u0000\u0000 \u0131\u0001\u0000\u0000"+ - "\u0000\"\u0135\u0001\u0000\u0000\u0000$\u0144\u0001\u0000\u0000\u0000"+ - "&\u0148\u0001\u0000\u0000\u0000(\u014a\u0001\u0000\u0000\u0000*\u014e"+ - "\u0001\u0000\u0000\u0000,\u0150\u0001\u0000\u0000\u0000.\u0159\u0001\u0000"+ - "\u0000\u00000\u015d\u0001\u0000\u0000\u00002\u016d\u0001\u0000\u0000\u0000"+ - "4\u0170\u0001\u0000\u0000\u00006\u0178\u0001\u0000\u0000\u00008\u0180"+ - "\u0001\u0000\u0000\u0000:\u0185\u0001\u0000\u0000\u0000<\u018d\u0001\u0000"+ - "\u0000\u0000>\u0195\u0001\u0000\u0000\u0000@\u019d\u0001\u0000\u0000\u0000"+ - "B\u01a2\u0001\u0000\u0000\u0000D\u01ce\u0001\u0000\u0000\u0000F\u01d2"+ - "\u0001\u0000\u0000\u0000H\u01d7\u0001\u0000\u0000\u0000J\u01d9\u0001\u0000"+ - "\u0000\u0000L\u01dc\u0001\u0000\u0000\u0000N\u01e5\u0001\u0000\u0000\u0000"+ - "P\u01ed\u0001\u0000\u0000\u0000R\u01f0\u0001\u0000\u0000\u0000T\u01f3"+ - "\u0001\u0000\u0000\u0000V\u01fc\u0001\u0000\u0000\u0000X\u0200\u0001\u0000"+ - "\u0000\u0000Z\u0206\u0001\u0000\u0000\u0000\\\u020a\u0001\u0000\u0000"+ - "\u0000^\u020d\u0001\u0000\u0000\u0000`\u0215\u0001\u0000\u0000\u0000b"+ - "\u0219\u0001\u0000\u0000\u0000d\u021d\u0001\u0000\u0000\u0000f\u0220\u0001"+ - "\u0000\u0000\u0000h\u0225\u0001\u0000\u0000\u0000j\u0229\u0001\u0000\u0000"+ - "\u0000l\u022b\u0001\u0000\u0000\u0000n\u022d\u0001\u0000\u0000\u0000p"+ - "\u0230\u0001\u0000\u0000\u0000r\u0234\u0001\u0000\u0000\u0000t\u0237\u0001"+ - "\u0000\u0000\u0000v\u024b\u0001\u0000\u0000\u0000x\u024f\u0001\u0000\u0000"+ - "\u0000z\u0254\u0001\u0000\u0000\u0000|}\u0003\u0002\u0001\u0000}~\u0005"+ - "\u0000\u0000\u0001~\u0001\u0001\u0000\u0000\u0000\u007f\u0080\u0006\u0001"+ - "\uffff\uffff\u0000\u0080\u0081\u0003\u0004\u0002\u0000\u0081\u0087\u0001"+ - "\u0000\u0000\u0000\u0082\u0083\n\u0001\u0000\u0000\u0083\u0084\u0005\u0019"+ - "\u0000\u0000\u0084\u0086\u0003\u0006\u0003\u0000\u0085\u0082\u0001\u0000"+ - "\u0000\u0000\u0086\u0089\u0001\u0000\u0000\u0000\u0087\u0085\u0001\u0000"+ - 
"\u0000\u0000\u0087\u0088\u0001\u0000\u0000\u0000\u0088\u0003\u0001\u0000"+ - "\u0000\u0000\u0089\u0087\u0001\u0000\u0000\u0000\u008a\u0091\u0003n7\u0000"+ - "\u008b\u0091\u0003\"\u0011\u0000\u008c\u0091\u0003\u001c\u000e\u0000\u008d"+ - "\u0091\u0003r9\u0000\u008e\u008f\u0004\u0002\u0001\u0000\u008f\u0091\u0003"+ - "0\u0018\u0000\u0090\u008a\u0001\u0000\u0000\u0000\u0090\u008b\u0001\u0000"+ - "\u0000\u0000\u0090\u008c\u0001\u0000\u0000\u0000\u0090\u008d\u0001\u0000"+ - "\u0000\u0000\u0090\u008e\u0001\u0000\u0000\u0000\u0091\u0005\u0001\u0000"+ - "\u0000\u0000\u0092\u00a3\u00032\u0019\u0000\u0093\u00a3\u0003\b\u0004"+ - "\u0000\u0094\u00a3\u0003P(\u0000\u0095\u00a3\u0003J%\u0000\u0096\u00a3"+ - "\u00034\u001a\u0000\u0097\u00a3\u0003L&\u0000\u0098\u00a3\u0003R)\u0000"+ - "\u0099\u00a3\u0003T*\u0000\u009a\u00a3\u0003X,\u0000\u009b\u00a3\u0003"+ - "Z-\u0000\u009c\u00a3\u0003t:\u0000\u009d\u00a3\u0003\\.\u0000\u009e\u009f"+ - "\u0004\u0003\u0002\u0000\u009f\u00a3\u0003z=\u0000\u00a0\u00a1\u0004\u0003"+ - "\u0003\u0000\u00a1\u00a3\u0003x<\u0000\u00a2\u0092\u0001\u0000\u0000\u0000"+ - "\u00a2\u0093\u0001\u0000\u0000\u0000\u00a2\u0094\u0001\u0000\u0000\u0000"+ - "\u00a2\u0095\u0001\u0000\u0000\u0000\u00a2\u0096\u0001\u0000\u0000\u0000"+ - "\u00a2\u0097\u0001\u0000\u0000\u0000\u00a2\u0098\u0001\u0000\u0000\u0000"+ - "\u00a2\u0099\u0001\u0000\u0000\u0000\u00a2\u009a\u0001\u0000\u0000\u0000"+ - "\u00a2\u009b\u0001\u0000\u0000\u0000\u00a2\u009c\u0001\u0000\u0000\u0000"+ - "\u00a2\u009d\u0001\u0000\u0000\u0000\u00a2\u009e\u0001\u0000\u0000\u0000"+ - "\u00a2\u00a0\u0001\u0000\u0000\u0000\u00a3\u0007\u0001\u0000\u0000\u0000"+ - "\u00a4\u00a5\u0005\u0010\u0000\u0000\u00a5\u00a6\u0003\n\u0005\u0000\u00a6"+ - "\t\u0001\u0000\u0000\u0000\u00a7\u00a8\u0006\u0005\uffff\uffff\u0000\u00a8"+ - "\u00a9\u0005,\u0000\u0000\u00a9\u00c6\u0003\n\u0005\b\u00aa\u00c6\u0003"+ - "\u0010\b\u0000\u00ab\u00c6\u0003\f\u0006\u0000\u00ac\u00ae\u0003\u0010"+ - "\b\u0000\u00ad\u00af\u0005,\u0000\u0000\u00ae\u00ad\u0001\u0000\u0000"+ - "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000\u0000"+ - "\u0000\u00b0\u00b1\u0005\'\u0000\u0000\u00b1\u00b2\u0005+\u0000\u0000"+ - "\u00b2\u00b7\u0003\u0010\b\u0000\u00b3\u00b4\u0005\"\u0000\u0000\u00b4"+ - "\u00b6\u0003\u0010\b\u0000\u00b5\u00b3\u0001\u0000\u0000\u0000\u00b6\u00b9"+ - "\u0001\u0000\u0000\u0000\u00b7\u00b5\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0001\u0000\u0000\u0000\u00b8\u00ba\u0001\u0000\u0000\u0000\u00b9\u00b7"+ - "\u0001\u0000\u0000\u0000\u00ba\u00bb\u00052\u0000\u0000\u00bb\u00c6\u0001"+ - "\u0000\u0000\u0000\u00bc\u00bd\u0003\u0010\b\u0000\u00bd\u00bf\u0005("+ - "\u0000\u0000\u00be\u00c0\u0005,\u0000\u0000\u00bf\u00be\u0001\u0000\u0000"+ - "\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0\u00c1\u0001\u0000\u0000"+ - "\u0000\u00c1\u00c2\u0005-\u0000\u0000\u00c2\u00c6\u0001\u0000\u0000\u0000"+ - "\u00c3\u00c4\u0004\u0005\u0004\u0000\u00c4\u00c6\u0003\u000e\u0007\u0000"+ - "\u00c5\u00a7\u0001\u0000\u0000\u0000\u00c5\u00aa\u0001\u0000\u0000\u0000"+ - "\u00c5\u00ab\u0001\u0000\u0000\u0000\u00c5\u00ac\u0001\u0000\u0000\u0000"+ - "\u00c5\u00bc\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000\u0000\u0000"+ - "\u00c6\u00cf\u0001\u0000\u0000\u0000\u00c7\u00c8\n\u0005\u0000\u0000\u00c8"+ - "\u00c9\u0005\u001e\u0000\u0000\u00c9\u00ce\u0003\n\u0005\u0006\u00ca\u00cb"+ - "\n\u0004\u0000\u0000\u00cb\u00cc\u0005/\u0000\u0000\u00cc\u00ce\u0003"+ - "\n\u0005\u0005\u00cd\u00c7\u0001\u0000\u0000\u0000\u00cd\u00ca\u0001\u0000"+ - 
"\u0000\u0000\u00ce\u00d1\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000"+ - "\u0000\u0000\u00cf\u00d0\u0001\u0000\u0000\u0000\u00d0\u000b\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cf\u0001\u0000\u0000\u0000\u00d2\u00d4\u0003\u0010"+ - "\b\u0000\u00d3\u00d5\u0005,\u0000\u0000\u00d4\u00d3\u0001\u0000\u0000"+ - "\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0005*\u0000\u0000\u00d7\u00d8\u0003j5\u0000\u00d8"+ - "\u00e1\u0001\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0000\u00da\u00dc"+ - "\u0005,\u0000\u0000\u00db\u00da\u0001\u0000\u0000\u0000\u00db\u00dc\u0001"+ - "\u0000\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de\u0005"+ - "1\u0000\u0000\u00de\u00df\u0003j5\u0000\u00df\u00e1\u0001\u0000\u0000"+ - "\u0000\u00e0\u00d2\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001\u0000\u0000"+ - "\u0000\u00e1\r\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003:\u001d\u0000"+ - "\u00e3\u00e4\u0005\u0018\u0000\u0000\u00e4\u00e5\u0003D\"\u0000\u00e5"+ - "\u000f\u0001\u0000\u0000\u0000\u00e6\u00ec\u0003\u0012\t\u0000\u00e7\u00e8"+ - "\u0003\u0012\t\u0000\u00e8\u00e9\u0003l6\u0000\u00e9\u00ea\u0003\u0012"+ - "\t\u0000\u00ea\u00ec\u0001\u0000\u0000\u0000\u00eb\u00e6\u0001\u0000\u0000"+ - "\u0000\u00eb\u00e7\u0001\u0000\u0000\u0000\u00ec\u0011\u0001\u0000\u0000"+ - "\u0000\u00ed\u00ee\u0006\t\uffff\uffff\u0000\u00ee\u00f2\u0003\u0014\n"+ - "\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003\u0012\t\u0003"+ - "\u00f1\u00ed\u0001\u0000\u0000\u0000\u00f1\u00ef\u0001\u0000\u0000\u0000"+ - "\u00f2\u00fb\u0001\u0000\u0000\u0000\u00f3\u00f4\n\u0002\u0000\u0000\u00f4"+ - "\u00f5\u0007\u0001\u0000\u0000\u00f5\u00fa\u0003\u0012\t\u0003\u00f6\u00f7"+ - "\n\u0001\u0000\u0000\u00f7\u00f8\u0007\u0000\u0000\u0000\u00f8\u00fa\u0003"+ - "\u0012\t\u0002\u00f9\u00f3\u0001\u0000\u0000\u0000\u00f9\u00f6\u0001\u0000"+ - "\u0000\u0000\u00fa\u00fd\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000"+ - "\u0000\u0000\u00fb\u00fc\u0001\u0000\u0000\u0000\u00fc\u0013\u0001\u0000"+ - "\u0000\u0000\u00fd\u00fb\u0001\u0000\u0000\u0000\u00fe\u00ff\u0006\n\uffff"+ - "\uffff\u0000\u00ff\u0107\u0003D\"\u0000\u0100\u0107\u0003:\u001d\u0000"+ - "\u0101\u0107\u0003\u0016\u000b\u0000\u0102\u0103\u0005+\u0000\u0000\u0103"+ - "\u0104\u0003\n\u0005\u0000\u0104\u0105\u00052\u0000\u0000\u0105\u0107"+ - "\u0001\u0000\u0000\u0000\u0106\u00fe\u0001\u0000\u0000\u0000\u0106\u0100"+ - "\u0001\u0000\u0000\u0000\u0106\u0101\u0001\u0000\u0000\u0000\u0106\u0102"+ - "\u0001\u0000\u0000\u0000\u0107\u010d\u0001\u0000\u0000\u0000\u0108\u0109"+ - "\n\u0001\u0000\u0000\u0109\u010a\u0005!\u0000\u0000\u010a\u010c\u0003"+ - "\u001a\r\u0000\u010b\u0108\u0001\u0000\u0000\u0000\u010c\u010f\u0001\u0000"+ - "\u0000\u0000\u010d\u010b\u0001\u0000\u0000\u0000\u010d\u010e\u0001\u0000"+ - "\u0000\u0000\u010e\u0015\u0001\u0000\u0000\u0000\u010f\u010d\u0001\u0000"+ - "\u0000\u0000\u0110\u0111\u0003\u0018\f\u0000\u0111\u011b\u0005+\u0000"+ - "\u0000\u0112\u011c\u0005=\u0000\u0000\u0113\u0118\u0003\n\u0005\u0000"+ - "\u0114\u0115\u0005\"\u0000\u0000\u0115\u0117\u0003\n\u0005\u0000\u0116"+ - "\u0114\u0001\u0000\u0000\u0000\u0117\u011a\u0001\u0000\u0000\u0000\u0118"+ - "\u0116\u0001\u0000\u0000\u0000\u0118\u0119\u0001\u0000\u0000\u0000\u0119"+ - "\u011c\u0001\u0000\u0000\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011b"+ - "\u0112\u0001\u0000\u0000\u0000\u011b\u0113\u0001\u0000\u0000\u0000\u011b"+ - "\u011c\u0001\u0000\u0000\u0000\u011c\u011d\u0001\u0000\u0000\u0000\u011d"+ - 
"\u011e\u00052\u0000\u0000\u011e\u0017\u0001\u0000\u0000\u0000\u011f\u0120"+ - "\u0003H$\u0000\u0120\u0019\u0001\u0000\u0000\u0000\u0121\u0122\u0003@"+ - " \u0000\u0122\u001b\u0001\u0000\u0000\u0000\u0123\u0124\u0005\f\u0000"+ - "\u0000\u0124\u0125\u0003\u001e\u000f\u0000\u0125\u001d\u0001\u0000\u0000"+ - "\u0000\u0126\u012b\u0003 \u0010\u0000\u0127\u0128\u0005\"\u0000\u0000"+ - "\u0128\u012a\u0003 \u0010\u0000\u0129\u0127\u0001\u0000\u0000\u0000\u012a"+ - "\u012d\u0001\u0000\u0000\u0000\u012b\u0129\u0001\u0000\u0000\u0000\u012b"+ - "\u012c\u0001\u0000\u0000\u0000\u012c\u001f\u0001\u0000\u0000\u0000\u012d"+ - "\u012b\u0001\u0000\u0000\u0000\u012e\u012f\u0003:\u001d\u0000\u012f\u0130"+ - "\u0005 \u0000\u0000\u0130\u0132\u0001\u0000\u0000\u0000\u0131\u012e\u0001"+ - "\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132\u0133\u0001"+ - "\u0000\u0000\u0000\u0133\u0134\u0003\n\u0005\u0000\u0134!\u0001\u0000"+ - "\u0000\u0000\u0135\u0136\u0005\u0006\u0000\u0000\u0136\u013b\u0003$\u0012"+ - "\u0000\u0137\u0138\u0005\"\u0000\u0000\u0138\u013a\u0003$\u0012\u0000"+ - "\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000\u0000"+ - "\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000\u0000"+ - "\u013c\u013f\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u0140\u0003*\u0015\u0000\u013f\u013e\u0001\u0000\u0000\u0000\u013f"+ - "\u0140\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141\u0142"+ - "\u0003&\u0013\u0000\u0142\u0143\u0005\u0018\u0000\u0000\u0143\u0145\u0001"+ - "\u0000\u0000\u0000\u0144\u0141\u0001\u0000\u0000\u0000\u0144\u0145\u0001"+ - "\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0147\u0003"+ - "(\u0014\u0000\u0147%\u0001\u0000\u0000\u0000\u0148\u0149\u0005L\u0000"+ - "\u0000\u0149\'\u0001\u0000\u0000\u0000\u014a\u014b\u0007\u0002\u0000\u0000"+ - "\u014b)\u0001\u0000\u0000\u0000\u014c\u014f\u0003,\u0016\u0000\u014d\u014f"+ - "\u0003.\u0017\u0000\u014e\u014c\u0001\u0000\u0000\u0000\u014e\u014d\u0001"+ - "\u0000\u0000\u0000\u014f+\u0001\u0000\u0000\u0000\u0150\u0151\u0005K\u0000"+ - "\u0000\u0151\u0156\u0005L\u0000\u0000\u0152\u0153\u0005\"\u0000\u0000"+ - "\u0153\u0155\u0005L\u0000\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ - "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ - "\u0157\u0001\u0000\u0000\u0000\u0157-\u0001\u0000\u0000\u0000\u0158\u0156"+ - "\u0001\u0000\u0000\u0000\u0159\u015a\u0005A\u0000\u0000\u015a\u015b\u0003"+ - ",\u0016\u0000\u015b\u015c\u0005B\u0000\u0000\u015c/\u0001\u0000\u0000"+ - "\u0000\u015d\u015e\u0005\u0013\u0000\u0000\u015e\u0163\u0003$\u0012\u0000"+ - "\u015f\u0160\u0005\"\u0000\u0000\u0160\u0162\u0003$\u0012\u0000\u0161"+ - "\u015f\u0001\u0000\u0000\u0000\u0162\u0165\u0001\u0000\u0000\u0000\u0163"+ - "\u0161\u0001\u0000\u0000\u0000\u0163\u0164\u0001\u0000\u0000\u0000\u0164"+ - "\u0167\u0001\u0000\u0000\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166"+ - "\u0168\u00036\u001b\u0000\u0167\u0166\u0001\u0000\u0000\u0000\u0167\u0168"+ - "\u0001\u0000\u0000\u0000\u0168\u016b\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0005\u001d\u0000\u0000\u016a\u016c\u0003\u001e\u000f\u0000\u016b\u0169"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000\u016c1\u0001"+ - "\u0000\u0000\u0000\u016d\u016e\u0005\u0004\u0000\u0000\u016e\u016f\u0003"+ - "\u001e\u000f\u0000\u016f3\u0001\u0000\u0000\u0000\u0170\u0172\u0005\u000f"+ - "\u0000\u0000\u0171\u0173\u00036\u001b\u0000\u0172\u0171\u0001\u0000\u0000"+ - 
"\u0000\u0172\u0173\u0001\u0000\u0000\u0000\u0173\u0176\u0001\u0000\u0000"+ - "\u0000\u0174\u0175\u0005\u001d\u0000\u0000\u0175\u0177\u0003\u001e\u000f"+ - "\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000\u0000"+ - "\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u017d\u00038\u001c\u0000\u0179"+ - "\u017a\u0005\"\u0000\u0000\u017a\u017c\u00038\u001c\u0000\u017b\u0179"+ - "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017b"+ - "\u0001\u0000\u0000\u0000\u017d\u017e\u0001\u0000\u0000\u0000\u017e7\u0001"+ - "\u0000\u0000\u0000\u017f\u017d\u0001\u0000\u0000\u0000\u0180\u0183\u0003"+ - " \u0010\u0000\u0181\u0182\u0005\u0010\u0000\u0000\u0182\u0184\u0003\n"+ - "\u0005\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ - "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u018a\u0003H$\u0000"+ - "\u0186\u0187\u0005$\u0000\u0000\u0187\u0189\u0003H$\u0000\u0188\u0186"+ - "\u0001\u0000\u0000\u0000\u0189\u018c\u0001\u0000\u0000\u0000\u018a\u0188"+ - "\u0001\u0000\u0000\u0000\u018a\u018b\u0001\u0000\u0000\u0000\u018b;\u0001"+ - "\u0000\u0000\u0000\u018c\u018a\u0001\u0000\u0000\u0000\u018d\u0192\u0003"+ - "B!\u0000\u018e\u018f\u0005$\u0000\u0000\u018f\u0191\u0003B!\u0000\u0190"+ - "\u018e\u0001\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192"+ - "\u0190\u0001\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ - "=\u0001\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u019a"+ - "\u0003<\u001e\u0000\u0196\u0197\u0005\"\u0000\u0000\u0197\u0199\u0003"+ - "<\u001e\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000"+ - "\u0000\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000"+ - "\u0000\u0000\u019b?\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ - "\u0000\u019d\u019e\u0007\u0003\u0000\u0000\u019eA\u0001\u0000\u0000\u0000"+ - "\u019f\u01a3\u0005P\u0000\u0000\u01a0\u01a1\u0004!\n\u0000\u01a1\u01a3"+ - "\u0003F#\u0000\u01a2\u019f\u0001\u0000\u0000\u0000\u01a2\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a3C\u0001\u0000\u0000\u0000\u01a4\u01cf\u0005-\u0000\u0000"+ - "\u01a5\u01a6\u0003h4\u0000\u01a6\u01a7\u0005C\u0000\u0000\u01a7\u01cf"+ - "\u0001\u0000\u0000\u0000\u01a8\u01cf\u0003f3\u0000\u01a9\u01cf\u0003h"+ - "4\u0000\u01aa\u01cf\u0003b1\u0000\u01ab\u01cf\u0003F#\u0000\u01ac\u01cf"+ - "\u0003j5\u0000\u01ad\u01ae\u0005A\u0000\u0000\u01ae\u01b3\u0003d2\u0000"+ - "\u01af\u01b0\u0005\"\u0000\u0000\u01b0\u01b2\u0003d2\u0000\u01b1\u01af"+ - "\u0001\u0000\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1"+ - "\u0001\u0000\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4\u01b6"+ - "\u0001\u0000\u0000\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7"+ - "\u0005B\u0000\u0000\u01b7\u01cf\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005"+ - "A\u0000\u0000\u01b9\u01be\u0003b1\u0000\u01ba\u01bb\u0005\"\u0000\u0000"+ - "\u01bb\u01bd\u0003b1\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0"+ - "\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf"+ - "\u0001\u0000\u0000\u0000\u01bf\u01c1\u0001\u0000\u0000\u0000\u01c0\u01be"+ - "\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005B\u0000\u0000\u01c2\u01cf\u0001"+ - "\u0000\u0000\u0000\u01c3\u01c4\u0005A\u0000\u0000\u01c4\u01c9\u0003j5"+ - "\u0000\u01c5\u01c6\u0005\"\u0000\u0000\u01c6\u01c8\u0003j5\u0000\u01c7"+ - "\u01c5\u0001\u0000\u0000\u0000\u01c8\u01cb\u0001\u0000\u0000\u0000\u01c9"+ - "\u01c7\u0001\u0000\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca"+ - 
"\u01cc\u0001\u0000\u0000\u0000\u01cb\u01c9\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0005B\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01a4"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a5\u0001\u0000\u0000\u0000\u01ce\u01a8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01a9\u0001\u0000\u0000\u0000\u01ce\u01aa"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ab\u0001\u0000\u0000\u0000\u01ce\u01ac"+ - "\u0001\u0000\u0000\u0000\u01ce\u01ad\u0001\u0000\u0000\u0000\u01ce\u01b8"+ - "\u0001\u0000\u0000\u0000\u01ce\u01c3\u0001\u0000\u0000\u0000\u01cfE\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d3\u00050\u0000\u0000\u01d1\u01d3\u0005@\u0000"+ - "\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000"+ - "\u0000\u01d3G\u0001\u0000\u0000\u0000\u01d4\u01d8\u0003@ \u0000\u01d5"+ - "\u01d6\u0004$\u000b\u0000\u01d6\u01d8\u0003F#\u0000\u01d7\u01d4\u0001"+ - "\u0000\u0000\u0000\u01d7\u01d5\u0001\u0000\u0000\u0000\u01d8I\u0001\u0000"+ - "\u0000\u0000\u01d9\u01da\u0005\t\u0000\u0000\u01da\u01db\u0005\u001b\u0000"+ - "\u0000\u01dbK\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005\u000e\u0000\u0000"+ - "\u01dd\u01e2\u0003N\'\u0000\u01de\u01df\u0005\"\u0000\u0000\u01df\u01e1"+ - "\u0003N\'\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4\u0001"+ - "\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001"+ - "\u0000\u0000\u0000\u01e3M\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001\u0000"+ - "\u0000\u0000\u01e5\u01e7\u0003\n\u0005\u0000\u01e6\u01e8\u0007\u0004\u0000"+ - "\u0000\u01e7\u01e6\u0001\u0000\u0000\u0000\u01e7\u01e8\u0001\u0000\u0000"+ - "\u0000\u01e8\u01eb\u0001\u0000\u0000\u0000\u01e9\u01ea\u0005.\u0000\u0000"+ - "\u01ea\u01ec\u0007\u0005\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecO\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0005\b\u0000\u0000\u01ee\u01ef\u0003>\u001f\u0000\u01efQ\u0001"+ - "\u0000\u0000\u0000\u01f0\u01f1\u0005\u0002\u0000\u0000\u01f1\u01f2\u0003"+ - ">\u001f\u0000\u01f2S\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u000b\u0000"+ - "\u0000\u01f4\u01f9\u0003V+\u0000\u01f5\u01f6\u0005\"\u0000\u0000\u01f6"+ - "\u01f8\u0003V+\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fb\u0001"+ - "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01f9\u01fa\u0001"+ - "\u0000\u0000\u0000\u01faU\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fd\u0003<\u001e\u0000\u01fd\u01fe\u0005T\u0000\u0000"+ - "\u01fe\u01ff\u0003<\u001e\u0000\u01ffW\u0001\u0000\u0000\u0000\u0200\u0201"+ - "\u0005\u0001\u0000\u0000\u0201\u0202\u0003\u0014\n\u0000\u0202\u0204\u0003"+ - "j5\u0000\u0203\u0205\u0003^/\u0000\u0204\u0203\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0001\u0000\u0000\u0000\u0205Y\u0001\u0000\u0000\u0000\u0206"+ - "\u0207\u0005\u0007\u0000\u0000\u0207\u0208\u0003\u0014\n\u0000\u0208\u0209"+ - "\u0003j5\u0000\u0209[\u0001\u0000\u0000\u0000\u020a\u020b\u0005\n\u0000"+ - "\u0000\u020b\u020c\u0003:\u001d\u0000\u020c]\u0001\u0000\u0000\u0000\u020d"+ - "\u0212\u0003`0\u0000\u020e\u020f\u0005\"\u0000\u0000\u020f\u0211\u0003"+ - "`0\u0000\u0210\u020e\u0001\u0000\u0000\u0000\u0211\u0214\u0001\u0000\u0000"+ - "\u0000\u0212\u0210\u0001\u0000\u0000\u0000\u0212\u0213\u0001\u0000\u0000"+ - "\u0000\u0213_\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000"+ - "\u0215\u0216\u0003@ \u0000\u0216\u0217\u0005 \u0000\u0000\u0217\u0218"+ - "\u0003D\"\u0000\u0218a\u0001\u0000\u0000\u0000\u0219\u021a\u0007\u0006"+ - "\u0000\u0000\u021ac\u0001\u0000\u0000\u0000\u021b\u021e\u0003f3\u0000"+ - 
"\u021c\u021e\u0003h4\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021d\u021c"+ - "\u0001\u0000\u0000\u0000\u021ee\u0001\u0000\u0000\u0000\u021f\u0221\u0007"+ - "\u0000\u0000\u0000\u0220\u021f\u0001\u0000\u0000\u0000\u0220\u0221\u0001"+ - "\u0000\u0000\u0000\u0221\u0222\u0001\u0000\u0000\u0000\u0222\u0223\u0005"+ - "\u001c\u0000\u0000\u0223g\u0001\u0000\u0000\u0000\u0224\u0226\u0007\u0000"+ - "\u0000\u0000\u0225\u0224\u0001\u0000\u0000\u0000\u0225\u0226\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000\u0227\u0228\u0005\u001b"+ - "\u0000\u0000\u0228i\u0001\u0000\u0000\u0000\u0229\u022a\u0005\u001a\u0000"+ - "\u0000\u022ak\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0007\u0000\u0000"+ - "\u022cm\u0001\u0000\u0000\u0000\u022d\u022e\u0005\u0005\u0000\u0000\u022e"+ - "\u022f\u0003p8\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005"+ - "A\u0000\u0000\u0231\u0232\u0003\u0002\u0001\u0000\u0232\u0233\u0005B\u0000"+ - "\u0000\u0233q\u0001\u0000\u0000\u0000\u0234\u0235\u0005\r\u0000\u0000"+ - "\u0235\u0236\u0005d\u0000\u0000\u0236s\u0001\u0000\u0000\u0000\u0237\u0238"+ - "\u0005\u0003\u0000\u0000\u0238\u023b\u0005Z\u0000\u0000\u0239\u023a\u0005"+ - "X\u0000\u0000\u023a\u023c\u0003<\u001e\u0000\u023b\u0239\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u0246\u0001\u0000\u0000"+ - "\u0000\u023d\u023e\u0005Y\u0000\u0000\u023e\u0243\u0003v;\u0000\u023f"+ - "\u0240\u0005\"\u0000\u0000\u0240\u0242\u0003v;\u0000\u0241\u023f\u0001"+ - "\u0000\u0000\u0000\u0242\u0245\u0001\u0000\u0000\u0000\u0243\u0241\u0001"+ - "\u0000\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0247\u0001"+ - "\u0000\u0000\u0000\u0245\u0243\u0001\u0000\u0000\u0000\u0246\u023d\u0001"+ - "\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247u\u0001\u0000"+ - "\u0000\u0000\u0248\u0249\u0003<\u001e\u0000\u0249\u024a\u0005 \u0000\u0000"+ - "\u024a\u024c\u0001\u0000\u0000\u0000\u024b\u0248\u0001\u0000\u0000\u0000"+ - "\u024b\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000"+ - "\u024d\u024e\u0003<\u001e\u0000\u024ew\u0001\u0000\u0000\u0000\u024f\u0250"+ - "\u0005\u0012\u0000\u0000\u0250\u0251\u0003$\u0012\u0000\u0251\u0252\u0005"+ - "X\u0000\u0000\u0252\u0253\u0003>\u001f\u0000\u0253y\u0001\u0000\u0000"+ - "\u0000\u0254\u0255\u0005\u0011\u0000\u0000\u0255\u0258\u00036\u001b\u0000"+ - "\u0256\u0257\u0005\u001d\u0000\u0000\u0257\u0259\u0003\u001e\u000f\u0000"+ - "\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000"+ - "\u0259{\u0001\u0000\u0000\u0000:\u0087\u0090\u00a2\u00ae\u00b7\u00bf\u00c5"+ - "\u00cd\u00cf\u00d4\u00db\u00e0\u00eb\u00f1\u00f9\u00fb\u0106\u010d\u0118"+ - "\u011b\u012b\u0131\u013b\u013f\u0144\u014e\u0156\u0163\u0167\u016b\u0172"+ - "\u0176\u017d\u0183\u018a\u0192\u019a\u01a2\u01b3\u01be\u01c9\u01ce\u01d2"+ - "\u01d7\u01e2\u01e7\u01eb\u01f9\u0204\u0212\u021d\u0220\u0225\u023b\u0243"+ - "\u0246\u024b\u0258"; + "\u0004\u0001\u0080\u027b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ + "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ + "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ + "\u0002\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007"+ + "\u000f\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007"+ + "\u0012\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007"+ + "\u0015\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007"+ + 
"\u0018\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007"+ + "\u001b\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007"+ + "\u001e\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007"+ + "\"\u0002#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007"+ + "\'\u0002(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007"+ + ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ + "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ + "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + "@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ + "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ + "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ + "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f5\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u00fb\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u0103\b\t\n\t\f\t\u0106\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0110\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0120\b\u000b\n\u000b"+ + "\f\u000b\u0123\t\u000b\u0003\u000b\u0125\b\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0133\b\u000f\n\u000f\f\u000f"+ + "\u0136\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u013b\b"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0005\u0011\u0143\b\u0011\n\u0011\f\u0011\u0146\t\u0011\u0001\u0011"+ + "\u0003\u0011\u0149\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012"+ + "\u014e\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0158\b\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015e\b\u0016\n\u0016"+ + "\f\u0016\u0161\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016b\b\u0018"+ + 
"\n\u0018\f\u0018\u016e\t\u0018\u0001\u0018\u0003\u0018\u0171\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0175\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017c\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u0180\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0185\b\u001b\n\u001b\f\u001b\u0188\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u018d\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u0192\b\u001d\n\u001d\f\u001d\u0195\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u019a\b\u001e\n\u001e\f\u001e\u019d\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a2\b\u001f\n\u001f"+ + "\f\u001f\u01a5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01ac"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f\"\u01be\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c6\b\"\n\""+ + "\f\"\u01c9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01d1\b\"\n\"\f\"\u01d4\t\"\u0001\"\u0001\"\u0003\"\u01d8\b\"\u0001#"+ + "\u0001#\u0003#\u01dc\b#\u0001$\u0001$\u0001$\u0003$\u01e1\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01ea\b&\n&\f&\u01ed\t&\u0001"+ + "\'\u0001\'\u0003\'\u01f1\b\'\u0001\'\u0001\'\u0003\'\u01f5\b\'\u0001("+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ + "*\u0201\b*\n*\f*\u0204\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u020e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u021a\b/\n/\f/\u021d\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0227\b2\u00013\u00033\u022a"+ + "\b3\u00013\u00013\u00014\u00034\u022f\b4\u00014\u00014\u00015\u00015\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0245\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u024b\b:\n:\f:\u024e\t:\u0003:\u0250\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0255\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0262\b=\u0001>\u0003>\u0265\b>\u0001"+ + ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026e\b?\u0001@\u0001"+ + "@\u0001@\u0001@\u0005@\u0274\b@\n@\f@\u0277\t@\u0001A\u0001A\u0001A\u0000"+ + "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "BD\u0002\u0000\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000"+ + "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u0294\u0000"+ + "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ + "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ + "\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ + "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f4\u0001"+ + "\u0000\u0000\u0000\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001"+ + "\u0000\u0000\u0000\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001"+ + "\u0000\u0000\u0000\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001"+ + "\u0000\u0000\u0000\u001e\u012f\u0001\u0000\u0000\u0000 \u013a\u0001\u0000"+ + "\u0000\u0000\"\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000"+ + 
"\u0000&\u0151\u0001\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*"+ + "\u0157\u0001\u0000\u0000\u0000,\u0159\u0001\u0000\u0000\u0000.\u0162\u0001"+ + "\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u00002\u0176\u0001\u0000\u0000"+ + "\u00004\u0179\u0001\u0000\u0000\u00006\u0181\u0001\u0000\u0000\u00008"+ + "\u0189\u0001\u0000\u0000\u0000:\u018e\u0001\u0000\u0000\u0000<\u0196\u0001"+ + "\u0000\u0000\u0000>\u019e\u0001\u0000\u0000\u0000@\u01a6\u0001\u0000\u0000"+ + "\u0000B\u01ab\u0001\u0000\u0000\u0000D\u01d7\u0001\u0000\u0000\u0000F"+ + "\u01db\u0001\u0000\u0000\u0000H\u01e0\u0001\u0000\u0000\u0000J\u01e2\u0001"+ + "\u0000\u0000\u0000L\u01e5\u0001\u0000\u0000\u0000N\u01ee\u0001\u0000\u0000"+ + "\u0000P\u01f6\u0001\u0000\u0000\u0000R\u01f9\u0001\u0000\u0000\u0000T"+ + "\u01fc\u0001\u0000\u0000\u0000V\u0205\u0001\u0000\u0000\u0000X\u0209\u0001"+ + "\u0000\u0000\u0000Z\u020f\u0001\u0000\u0000\u0000\\\u0213\u0001\u0000"+ + "\u0000\u0000^\u0216\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000"+ + "b\u0222\u0001\u0000\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u0229"+ + "\u0001\u0000\u0000\u0000h\u022e\u0001\u0000\u0000\u0000j\u0232\u0001\u0000"+ + "\u0000\u0000l\u0234\u0001\u0000\u0000\u0000n\u0236\u0001\u0000\u0000\u0000"+ + "p\u0239\u0001\u0000\u0000\u0000r\u023d\u0001\u0000\u0000\u0000t\u0240"+ + "\u0001\u0000\u0000\u0000v\u0254\u0001\u0000\u0000\u0000x\u0258\u0001\u0000"+ + "\u0000\u0000z\u025d\u0001\u0000\u0000\u0000|\u0264\u0001\u0000\u0000\u0000"+ + "~\u026a\u0001\u0000\u0000\u0000\u0080\u026f\u0001\u0000\u0000\u0000\u0082"+ + "\u0278\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ + "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ + "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ + "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ + "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ + "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ + "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ + "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ + "\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094\u0099\u0003"+ + "\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ + "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ + "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ + "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ + "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ + "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ + "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ + "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ + "\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ + "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ + "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ + "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ + "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ + "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ + 
"\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ + "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ + "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ + "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ + "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ + "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ + "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ + ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ + "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ + "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ + "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ + "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ + "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ + "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ + "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ + "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ + "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ + "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ + "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ + "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ + "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ + "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ + "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ + "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ + "\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ec\u0003:"+ + "\u001d\u0000\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003D\"\u0000"+ + "\u00ee\u000f\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0"+ + "\u00f1\u0003\u0012\t\u0000\u00f1\u00f2\u0003l6\u0000\u00f2\u00f3\u0003"+ + "\u0012\t\u0000\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000"+ + "\u0000\u0000\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000"+ + "\u0000\u0000\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014"+ + 
"\n\u0000\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t"+ + "\u0003\u00fa\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000"+ + "\u0000\u00fb\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000"+ + "\u00fd\u00fe\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff"+ + "\u0100\n\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103"+ + "\u0003\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001"+ + "\u0000\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001"+ + "\u0000\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001"+ + "\u0000\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006"+ + "\n\uffff\uffff\u0000\u0108\u0110\u0003D\"\u0000\u0109\u0110\u0003:\u001d"+ + "\u0000\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000"+ + "\u010c\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e"+ + "\u0110\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f"+ + "\u0109\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f"+ + "\u010b\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111"+ + "\u0112\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115"+ + "\u0003\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ + "\u0000\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050"+ + "\u0000\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005"+ + "\u0000\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000"+ + "\u011f\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000"+ + "\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000"+ + "\u0122\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000"+ + "\u0124\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128"+ + "\u0129\u0003H$\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ + "@ \u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f\u0000"+ + "\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000\u0000"+ + "\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000\u0000"+ + "\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133"+ + "\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000\u0134"+ + "\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000\u0136"+ + "\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u0003:\u001d\u0000\u0138\u0139"+ + "\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137\u0001"+ + "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c\u0001"+ + "\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001\u0000"+ + "\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003$\u0012"+ + "\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012\u0000"+ + "\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000"+ + "\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000"+ + "\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000"+ + "\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000\u0148"+ + 
"\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a\u014b"+ + "\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001\u0000"+ + "\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ + "\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003(\u0014"+ + "\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000\u0000\u0152"+ + "\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000\u0154)"+ + "\u0001\u0000\u0000\u0000\u0155\u0158\u0003,\u0016\u0000\u0156\u0158\u0003"+ + ".\u0017\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0157\u0156\u0001\u0000"+ + "\u0000\u0000\u0158+\u0001\u0000\u0000\u0000\u0159\u015a\u0005P\u0000\u0000"+ + "\u015a\u015f\u0005Q\u0000\u0000\u015b\u015c\u0005\'\u0000\u0000\u015c"+ + "\u015e\u0005Q\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u0161"+ + "\u0001\u0000\u0000\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u015f\u0160"+ + "\u0001\u0000\u0000\u0000\u0160-\u0001\u0000\u0000\u0000\u0161\u015f\u0001"+ + "\u0000\u0000\u0000\u0162\u0163\u0005F\u0000\u0000\u0163\u0164\u0003,\u0016"+ + "\u0000\u0164\u0165\u0005G\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166"+ + "\u0167\u0005\u0013\u0000\u0000\u0167\u016c\u0003$\u0012\u0000\u0168\u0169"+ + "\u0005\'\u0000\u0000\u0169\u016b\u0003$\u0012\u0000\u016a\u0168\u0001"+ + "\u0000\u0000\u0000\u016b\u016e\u0001\u0000\u0000\u0000\u016c\u016a\u0001"+ + "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d\u0170\u0001"+ + "\u0000\u0000\u0000\u016e\u016c\u0001\u0000\u0000\u0000\u016f\u0171\u0003"+ + "6\u001b\u0000\u0170\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000"+ + "\u0000\u0000\u0171\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005!\u0000"+ + "\u0000\u0173\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u01751\u0001\u0000\u0000\u0000"+ + "\u0176\u0177\u0005\u0004\u0000\u0000\u0177\u0178\u0003\u001e\u000f\u0000"+ + "\u01783\u0001\u0000\u0000\u0000\u0179\u017b\u0005\u000f\u0000\u0000\u017a"+ + "\u017c\u00036\u001b\u0000\u017b\u017a\u0001\u0000\u0000\u0000\u017b\u017c"+ + "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017e"+ + "\u0005!\u0000\u0000\u017e\u0180\u0003\u001e\u000f\u0000\u017f\u017d\u0001"+ + "\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01805\u0001\u0000"+ + "\u0000\u0000\u0181\u0186\u00038\u001c\u0000\u0182\u0183\u0005\'\u0000"+ + "\u0000\u0183\u0185\u00038\u001c\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ + "\u0186\u0187\u0001\u0000\u0000\u0000\u01877\u0001\u0000\u0000\u0000\u0188"+ + "\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0003 \u0010\u0000\u018a\u018b"+ + "\u0005\u0010\u0000\u0000\u018b\u018d\u0003\n\u0005\u0000\u018c\u018a\u0001"+ + "\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d9\u0001\u0000"+ + "\u0000\u0000\u018e\u0193\u0003H$\u0000\u018f\u0190\u0005)\u0000\u0000"+ + "\u0190\u0192\u0003H$\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0195"+ + "\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0194"+ + "\u0001\u0000\u0000\u0000\u0194;\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ + "\u0000\u0000\u0000\u0196\u019b\u0003B!\u0000\u0197\u0198\u0005)\u0000"+ + "\u0000\u0198\u019a\u0003B!\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a"+ + "\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b"+ + "\u019c\u0001\u0000\u0000\u0000\u019c=\u0001\u0000\u0000\u0000\u019d\u019b"+ + 
"\u0001\u0000\u0000\u0000\u019e\u01a3\u0003<\u001e\u0000\u019f\u01a0\u0005"+ + "\'\u0000\u0000\u01a0\u01a2\u0003<\u001e\u0000\u01a1\u019f\u0001\u0000"+ + "\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4?\u0001\u0000\u0000"+ + "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0007\u0003\u0000"+ + "\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8\u01ac\u0005U\u0000\u0000\u01a9"+ + "\u01aa\u0004!\n\u0000\u01aa\u01ac\u0003F#\u0000\u01ab\u01a8\u0001\u0000"+ + "\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01acC\u0001\u0000\u0000"+ + "\u0000\u01ad\u01d8\u00052\u0000\u0000\u01ae\u01af\u0003h4\u0000\u01af"+ + "\u01b0\u0005H\u0000\u0000\u01b0\u01d8\u0001\u0000\u0000\u0000\u01b1\u01d8"+ + "\u0003f3\u0000\u01b2\u01d8\u0003h4\u0000\u01b3\u01d8\u0003b1\u0000\u01b4"+ + "\u01d8\u0003F#\u0000\u01b5\u01d8\u0003j5\u0000\u01b6\u01b7\u0005F\u0000"+ + "\u0000\u01b7\u01bc\u0003d2\u0000\u01b8\u01b9\u0005\'\u0000\u0000\u01b9"+ + "\u01bb\u0003d2\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001"+ + "\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001"+ + "\u0000\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0005G\u0000\u0000\u01c0\u01d8\u0001\u0000"+ + "\u0000\u0000\u01c1\u01c2\u0005F\u0000\u0000\u01c2\u01c7\u0003b1\u0000"+ + "\u01c3\u01c4\u0005\'\u0000\u0000\u01c4\u01c6\u0003b1\u0000\u01c5\u01c3"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c9\u0001\u0000\u0000\u0000\u01c7\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000\u0000\u01c8\u01ca"+ + "\u0001\u0000\u0000\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cb"+ + "\u0005G\u0000\u0000\u01cb\u01d8\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005"+ + "F\u0000\u0000\u01cd\u01d2\u0003j5\u0000\u01ce\u01cf\u0005\'\u0000\u0000"+ + "\u01cf\u01d1\u0003j5\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d4"+ + "\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d2"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005G\u0000\u0000\u01d6\u01d8\u0001"+ + "\u0000\u0000\u0000\u01d7\u01ad\u0001\u0000\u0000\u0000\u01d7\u01ae\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b1\u0001\u0000\u0000\u0000\u01d7\u01b2\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b3\u0001\u0000\u0000\u0000\u01d7\u01b4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b5\u0001\u0000\u0000\u0000\u01d7\u01b6\u0001"+ + "\u0000\u0000\u0000\u01d7\u01c1\u0001\u0000\u0000\u0000\u01d7\u01cc\u0001"+ + "\u0000\u0000\u0000\u01d8E\u0001\u0000\u0000\u0000\u01d9\u01dc\u00055\u0000"+ + "\u0000\u01da\u01dc\u0005E\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000"+ + "\u01db\u01da\u0001\u0000\u0000\u0000\u01dcG\u0001\u0000\u0000\u0000\u01dd"+ + "\u01e1\u0003@ \u0000\u01de\u01df\u0004$\u000b\u0000\u01df\u01e1\u0003"+ + "F#\u0000\u01e0\u01dd\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000\u0000"+ + "\u0000\u01e1I\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\t\u0000\u0000"+ + "\u01e3\u01e4\u0005\u001f\u0000\u0000\u01e4K\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0005\u000e\u0000\u0000\u01e6\u01eb\u0003N\'\u0000\u01e7\u01e8"+ + "\u0005\'\u0000\u0000\u01e8\u01ea\u0003N\'\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecM\u0001\u0000\u0000"+ + "\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ee\u01f0\u0003\n\u0005\u0000"+ + 
"\u01ef\u01f1\u0007\u0004\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f4\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f3\u00053\u0000\u0000\u01f3\u01f5\u0007\u0005\u0000\u0000\u01f4"+ + "\u01f2\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "O\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\b\u0000\u0000\u01f7\u01f8"+ + "\u0003>\u001f\u0000\u01f8Q\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005\u0002"+ + "\u0000\u0000\u01fa\u01fb\u0003>\u001f\u0000\u01fbS\u0001\u0000\u0000\u0000"+ + "\u01fc\u01fd\u0005\u000b\u0000\u0000\u01fd\u0202\u0003V+\u0000\u01fe\u01ff"+ + "\u0005\'\u0000\u0000\u01ff\u0201\u0003V+\u0000\u0200\u01fe\u0001\u0000"+ + "\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203U\u0001\u0000\u0000"+ + "\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e\u0000"+ + "\u0206\u0207\u0005Y\u0000\u0000\u0207\u0208\u0003<\u001e\u0000\u0208W"+ + "\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0001\u0000\u0000\u020a\u020b"+ + "\u0003\u0014\n\u0000\u020b\u020d\u0003j5\u0000\u020c\u020e\u0003^/\u0000"+ + "\u020d\u020c\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000"+ + "\u020eY\u0001\u0000\u0000\u0000\u020f\u0210\u0005\u0007\u0000\u0000\u0210"+ + "\u0211\u0003\u0014\n\u0000\u0211\u0212\u0003j5\u0000\u0212[\u0001\u0000"+ + "\u0000\u0000\u0213\u0214\u0005\n\u0000\u0000\u0214\u0215\u0003:\u001d"+ + "\u0000\u0215]\u0001\u0000\u0000\u0000\u0216\u021b\u0003`0\u0000\u0217"+ + "\u0218\u0005\'\u0000\u0000\u0218\u021a\u0003`0\u0000\u0219\u0217\u0001"+ + "\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b\u0219\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c_\u0001\u0000"+ + "\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e\u021f\u0003@ \u0000"+ + "\u021f\u0220\u0005$\u0000\u0000\u0220\u0221\u0003D\"\u0000\u0221a\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0007\u0006\u0000\u0000\u0223c\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0003f3\u0000\u0225\u0227\u0003h4\u0000\u0226"+ + "\u0224\u0001\u0000\u0000\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0227"+ + "e\u0001\u0000\u0000\u0000\u0228\u022a\u0007\u0000\u0000\u0000\u0229\u0228"+ + "\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0001\u0000\u0000\u0000\u022b\u022c\u0005 \u0000\u0000\u022cg\u0001\u0000"+ + "\u0000\u0000\u022d\u022f\u0007\u0000\u0000\u0000\u022e\u022d\u0001\u0000"+ + "\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000"+ + "\u0000\u0000\u0230\u0231\u0005\u001f\u0000\u0000\u0231i\u0001\u0000\u0000"+ + "\u0000\u0232\u0233\u0005\u001e\u0000\u0000\u0233k\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0007\u0007\u0000\u0000\u0235m\u0001\u0000\u0000\u0000\u0236"+ + "\u0237\u0005\u0005\u0000\u0000\u0237\u0238\u0003p8\u0000\u0238o\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0005F\u0000\u0000\u023a\u023b\u0003\u0002"+ + "\u0001\u0000\u023b\u023c\u0005G\u0000\u0000\u023cq\u0001\u0000\u0000\u0000"+ + "\u023d\u023e\u0005\r\u0000\u0000\u023e\u023f\u0005i\u0000\u0000\u023f"+ + "s\u0001\u0000\u0000\u0000\u0240\u0241\u0005\u0003\u0000\u0000\u0241\u0244"+ + "\u0005_\u0000\u0000\u0242\u0243\u0005]\u0000\u0000\u0243\u0245\u0003<"+ + "\u001e\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000"+ + "\u0000\u0000\u0245\u024f\u0001\u0000\u0000\u0000\u0246\u0247\u0005^\u0000"+ + "\u0000\u0247\u024c\u0003v;\u0000\u0248\u0249\u0005\'\u0000\u0000\u0249"+ + 
"\u024b\u0003v;\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001"+ + "\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001"+ + "\u0000\u0000\u0000\u024d\u0250\u0001\u0000\u0000\u0000\u024e\u024c\u0001"+ + "\u0000\u0000\u0000\u024f\u0246\u0001\u0000\u0000\u0000\u024f\u0250\u0001"+ + "\u0000\u0000\u0000\u0250u\u0001\u0000\u0000\u0000\u0251\u0252\u0003<\u001e"+ + "\u0000\u0252\u0253\u0005$\u0000\u0000\u0253\u0255\u0001\u0000\u0000\u0000"+ + "\u0254\u0251\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000\u0000"+ + "\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0003<\u001e\u0000\u0257"+ + "w\u0001\u0000\u0000\u0000\u0258\u0259\u0005\u0012\u0000\u0000\u0259\u025a"+ + "\u0003$\u0012\u0000\u025a\u025b\u0005]\u0000\u0000\u025b\u025c\u0003>"+ + "\u001f\u0000\u025cy\u0001\u0000\u0000\u0000\u025d\u025e\u0005\u0011\u0000"+ + "\u0000\u025e\u0261\u00036\u001b\u0000\u025f\u0260\u0005!\u0000\u0000\u0260"+ + "\u0262\u0003\u001e\u000f\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261"+ + "\u0262\u0001\u0000\u0000\u0000\u0262{\u0001\u0000\u0000\u0000\u0263\u0265"+ + "\u0007\b\u0000\u0000\u0264\u0263\u0001\u0000\u0000\u0000\u0264\u0265\u0001"+ + "\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0005"+ + "\u0014\u0000\u0000\u0267\u0268\u0003~?\u0000\u0268\u0269\u0003\u0080@"+ + "\u0000\u0269}\u0001\u0000\u0000\u0000\u026a\u026d\u0003@ \u0000\u026b"+ + "\u026c\u0005Y\u0000\u0000\u026c\u026e\u0003@ \u0000\u026d\u026b\u0001"+ + "\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u007f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0005]\u0000\u0000\u0270\u0275\u0003\u0082"+ + "A\u0000\u0271\u0272\u0005\'\u0000\u0000\u0272\u0274\u0003\u0082A\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000"+ + "\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000"+ + "\u0276\u0081\u0001\u0000\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000"+ + "\u0278\u0279\u0003\u0010\b\u0000\u0279\u0083\u0001\u0000\u0000\u0000="+ + "\u008f\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9"+ + "\u00f4\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148"+ + "\u014d\u0157\u015f\u016c\u0170\u0174\u017b\u017f\u0186\u018c\u0193\u019b"+ + "\u01a3\u01ab\u01bc\u01c7\u01d2\u01d7\u01db\u01e0\u01eb\u01f0\u01f4\u0202"+ + "\u020d\u021b\u0226\u0229\u022e\u0244\u024c\u024f\u0254\u0261\u0264\u026d"+ + "\u0275"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 556a97657635a..6071219839bab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -1052,6 +1052,54 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { } + /** + * {@inheritDoc} + * + * <p>The default implementation does nothing.</p>
    + */ + @Override public void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { } /** * {@inheritDoc} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index 56b6999615f50..afe7146923791 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -622,4 +622,32 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx) { return visitChildren(ctx); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index cf658c4a73141..0faca2541c9ad 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -941,4 +941,44 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + */ + void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + */ + void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + */ + void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void enterJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#joinPredicate}. + * @param ctx the parse tree + */ + void exitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 86c1d1aafc33a..e91cd6670e971 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -567,4 +567,28 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitInlinestatsCommand(EsqlBaseParser.InlinestatsCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCommand}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinTarget}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinTarget(EsqlBaseParser.JoinTargetContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinCondition}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinCondition(EsqlBaseParser.JoinConditionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#joinPredicate}. 
+ * @param ctx the parse tree + * @return the visitor result + */ + T visitJoinPredicate(EsqlBaseParser.JoinPredicateContext ctx); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index f83af534eaa72..99e03b3653f79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; +import org.elasticsearch.xpack.esql.plan.logical.join.LookupJoin; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.joni.exception.SyntaxException; @@ -68,6 +69,7 @@ import java.util.Set; import java.util.function.Function; +import static java.util.Collections.emptyList; import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; @@ -502,7 +504,7 @@ public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) @Override public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { if (false == Build.current().isSnapshot()) { - throw new ParsingException(source(ctx), "LOOKUP is in preview and only available in SNAPSHOT build"); + throw new ParsingException(source(ctx), "LOOKUP__ is in preview and only available in SNAPSHOT build"); } var source = source(ctx); @@ -524,4 +526,42 @@ public PlanFactory visitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { return p -> new Lookup(source, p, tableName, matchFields, null /* localRelation will be resolved later*/); } + public PlanFactory visitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { + var source = source(ctx); + if (false == Build.current().isSnapshot()) { + throw new ParsingException(source, "JOIN is in preview and only available in SNAPSHOT build"); + } + + if (ctx.type != null && ctx.type.getType() != EsqlBaseParser.DEV_JOIN_LOOKUP) { + String joinType = ctx.type == null ? "(INNER)" : ctx.type.getText(); + throw new ParsingException(source, "only LOOKUP JOIN available, {} JOIN unsupported at the moment", joinType); + } + + var target = ctx.joinTarget(); + UnresolvedRelation right = new UnresolvedRelation( + source(target), + new TableIdentifier(source(target.index), null, visitIdentifier(target.index)), + false, + emptyList(), + IndexMode.LOOKUP, + null, + "???" 
+ ); + + var condition = ctx.joinCondition(); + + // ON only with qualified names + var predicates = expressions(condition.joinPredicate()); + List joinFields = new ArrayList<>(predicates.size()); + for (var f : predicates) { + // verify each field is an unresolved attribute + if (f instanceof UnresolvedAttribute ua) { + joinFields.add(ua); + } else { + throw new ParsingException(f.source(), "JOIN ON clause only supports fields at the moment, found [{}]", f.sourceText()); + } + } + + return p -> new LookupJoin(source, p, right, joinFields); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 0705ae7f778cd..484a655fc2988 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -18,7 +18,7 @@ public class ParsingException extends EsqlClientException { public ParsingException(String message, Exception cause, int line, int charPositionInLine) { super(message, cause); this.line = line; - this.charPositionInLine = charPositionInLine; + this.charPositionInLine = charPositionInLine + 1; } ParsingException(String message, Object... args) { @@ -42,7 +42,7 @@ public int getLineNumber() { } public int getColumnNumber() { - return charPositionInLine + 1; + return charPositionInLine; } public String getErrorMessage() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java new file mode 100644 index 0000000000000..40b91beaee3eb --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
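The new PlanWritables class below gathers the logical and physical plan serialization entries that previously lived on LogicalPlan and PhysicalPlan (both lists are removed later in this diff). A minimal sketch of how such a list is typically fed into a NamedWriteableRegistry; the wiring shown here is illustrative, not the actual EsqlPlugin code:

import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xpack.esql.plan.PlanWritables;

import java.util.List;

class PlanRegistrySketch {
    // Build a registry that can deserialize any plan node in this diff by its
    // registered name; the registry maps (category class, name) -> reader.
    static NamedWriteableRegistry buildRegistry() {
        List<NamedWriteableRegistry.Entry> entries = PlanWritables.getNamedWriteables();
        return new NamedWriteableRegistry(entries);
    }
}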
+ */ + +package org.elasticsearch.xpack.esql.plan; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.xpack.esql.plan.logical.Aggregate; +import org.elasticsearch.xpack.esql.plan.logical.Dissect; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.elasticsearch.xpack.esql.plan.logical.EsRelation; +import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.Filter; +import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.InlineStats; +import org.elasticsearch.xpack.esql.plan.logical.Limit; +import org.elasticsearch.xpack.esql.plan.logical.Lookup; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; +import org.elasticsearch.xpack.esql.plan.logical.join.Join; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.DissectExec; +import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; +import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; +import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.EvalExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; +import org.elasticsearch.xpack.esql.plan.physical.FilterExec; +import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.GrokExec; +import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; +import org.elasticsearch.xpack.esql.plan.physical.LimitExec; +import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; +import org.elasticsearch.xpack.esql.plan.physical.OrderExec; +import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; +import org.elasticsearch.xpack.esql.plan.physical.RowExec; +import org.elasticsearch.xpack.esql.plan.physical.ShowExec; +import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; +import org.elasticsearch.xpack.esql.plan.physical.TopNExec; + +import java.util.ArrayList; +import java.util.List; + +public class PlanWritables { +
+ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); + entries.addAll(logical()); + entries.addAll(physical()); + return entries; + } +
+ public static List<NamedWriteableRegistry.Entry> logical() { + return List.of( + Aggregate.ENTRY, + Dissect.ENTRY, + Enrich.ENTRY, + EsRelation.ENTRY, + EsqlProject.ENTRY, + Eval.ENTRY, + Filter.ENTRY, + Grok.ENTRY, + InlineStats.ENTRY, + InlineJoin.ENTRY, + Join.ENTRY, + LocalRelation.ENTRY, + Limit.ENTRY, + Lookup.ENTRY, + MvExpand.ENTRY, + OrderBy.ENTRY, + Project.ENTRY, + TopN.ENTRY + ); + } +
+ public static List<NamedWriteableRegistry.Entry> physical() { + return List.of( + AggregateExec.ENTRY, + DissectExec.ENTRY, + EnrichExec.ENTRY, + EsQueryExec.ENTRY, + EsSourceExec.ENTRY, + EvalExec.ENTRY, + ExchangeExec.ENTRY, + ExchangeSinkExec.ENTRY, +
ExchangeSourceExec.ENTRY, + FieldExtractExec.ENTRY, + FilterExec.ENTRY, + FragmentExec.ENTRY, + GrokExec.ENTRY, + HashJoinExec.ENTRY, + LimitExec.ENTRY, + LocalSourceExec.ENTRY, + MvExpandExec.ENTRY, + OrderExec.ENTRY, + ProjectExec.ENTRY, + RowExec.ENTRY, + ShowExec.ENTRY, + SubqueryExec.ENTRY, + TopNExec.ENTRY + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java index e34e0b8e27863..ef8c3983faf2e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/QueryPlan.java @@ -59,12 +59,17 @@ public AttributeSet inputSet() { */ public List expressions() { if (lazyExpressions == null) { - lazyExpressions = new ArrayList<>(); - forEachPropertyOnly(Object.class, e -> doForEachExpression(e, lazyExpressions::add)); + lazyExpressions = computeExpressions(); } return lazyExpressions; } + protected List computeExpressions() { + List expressions = new ArrayList<>(); + forEachPropertyOnly(Object.class, e -> doForEachExpression(e, expressions::add)); + return expressions; + } + /** * The attributes required to be in the {@link QueryPlan#inputSet()} for this plan to be valid. * Excludes generated references. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index eb72009638396..794a52b8e3f89 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -181,7 +181,12 @@ public boolean equals(Object obj) { @Override public String nodeString() { - return nodeName() + "[" + index + "]" + NodeUtils.limitedToString(attrs); + return nodeName() + + "[" + + index + + "]" + + (indexMode != IndexMode.STANDARD ? 
"[" + indexMode.name() + "]" : "") + + NodeUtils.limitedToString(attrs); } public static IndexMode readIndexMode(StreamInput in) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java index 9e854450a2d34..4211f8a0d45b6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/InlineStats.java @@ -20,7 +20,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.ArrayList; @@ -118,7 +118,7 @@ private JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, namedGroupings, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, namedGroupings, leftFields, rightFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java index e07dd9e14649e..e845c25bd3b32 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/LogicalPlan.java @@ -6,15 +6,10 @@ */ package org.elasticsearch.xpack.esql.plan.logical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.capabilities.Resolvable; import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; -import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; -import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.util.List; @@ -23,29 +18,6 @@ * For example, a logical plan in English would be: "I want to get from DEN to SFO". */ public abstract class LogicalPlan extends QueryPlan implements Resolvable { - public static List getNamedWriteables() { - return List.of( - Aggregate.ENTRY, - Dissect.ENTRY, - Enrich.ENTRY, - EsRelation.ENTRY, - EsqlProject.ENTRY, - Eval.ENTRY, - Filter.ENTRY, - Grok.ENTRY, - InlineStats.ENTRY, - InlineJoin.ENTRY, - Join.ENTRY, - LocalRelation.ENTRY, - Limit.ENTRY, - Lookup.ENTRY, - MvExpand.ENTRY, - OrderBy.ENTRY, - Project.ENTRY, - TopN.ENTRY - ); - } - /** * Order is important in the enum; any values should be added at the end. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java index 70f8a24cfc87e..6e7f421003292 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Lookup.java @@ -19,7 +19,7 @@ import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import java.io.IOException; @@ -114,7 +114,7 @@ public JoinConfig joinConfig() { } } } - return new JoinConfig(JoinType.LEFT, matchFields, leftFields, rightFields); + return new JoinConfig(JoinTypes.LEFT, matchFields, leftFields, rightFields); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java index 0dfbe4936e4e3..384c3f7a340ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/UnresolvedRelation.java @@ -25,6 +25,10 @@ public class UnresolvedRelation extends LeafPlan implements Unresolvable { private final TableIdentifier table; private final boolean frozen; private final List metadataFields; + /* + * Expected indexMode based on the declaration - used later for verification + * at resolution time. 
+ */ private final IndexMode indexMode; private final String unresolvedMsg; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java index f9be61ed2c8d7..0e182646d914a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/Join.java @@ -10,9 +10,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.AttributeSet; -import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; @@ -23,12 +22,12 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; -import java.util.Set; -import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; +import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; +import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.RIGHT; public class Join extends BinaryPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(LogicalPlan.class, "Join", Join::new); @@ -92,11 +91,6 @@ protected NodeInfo info() { ); } - @Override - public Join replaceChildren(LogicalPlan left, LogicalPlan right) { - return new Join(source(), left, right, config); - } - @Override public List output() { if (lazyOutput == null) { @@ -106,35 +100,42 @@ public List output() { } /** - * Merge output fields. - * Currently only implemented for LEFT JOINs; the rightOutput shadows the leftOutput, except for any attributes that - * occur in the join's matchFields. + * Combine the two lists of attributes into one. + * In case of (name) conflicts, specify which sides wins, that is overrides the other column - the left or the right. */ public static List computeOutput(List leftOutput, List rightOutput, JoinConfig config) { - AttributeSet matchFieldSet = new AttributeSet(config.matchFields()); - Set matchFieldNames = new HashSet<>(Expressions.names(config.matchFields())); - return switch (config.type()) { - case LEFT -> { - // Right side becomes nullable. 
- List fieldsAddedFromRight = removeCollisionsWithMatchFields(rightOutput, matchFieldSet, matchFieldNames); - yield mergeOutputAttributes(fieldsAddedFromRight, leftOutput); - } - default -> throw new UnsupportedOperationException("Other JOINs than LEFT not supported"); - }; + JoinType joinType = config.type(); + List output; + // TODO: make the other side nullable + if (LEFT.equals(joinType)) { + // right side becomes nullable and overrides left + // output = merge(leftOutput, makeNullable(rightOutput)); + output = merge(leftOutput, rightOutput); + } else if (RIGHT.equals(joinType)) { + // left side becomes nullable and overrides right + // output = merge(makeNullable(leftOutput), rightOutput); + output = merge(leftOutput, rightOutput); + } else { + throw new IllegalArgumentException(joinType.joinName() + " unsupported"); + } + return output; } - private static List removeCollisionsWithMatchFields( - List attributes, - AttributeSet matchFields, - Set matchFieldNames - ) { - List result = new ArrayList<>(); - for (Attribute attr : attributes) { - if ((matchFields.contains(attr) || matchFieldNames.contains(attr.name())) == false) { - result.add(attr); - } + /** + * Merge the two lists of attributes into one and preserves order. + */ + private static List merge(List left, List right) { + // use linked hash map to preserve order + Map nameToAttribute = Maps.newLinkedHashMapWithExpectedSize(left.size() + right.size()); + for (Attribute a : left) { + nameToAttribute.put(a.name(), a); + } + for (Attribute a : right) { + // override the existing entry in place + nameToAttribute.compute(a.name(), (name, existing) -> a); } - return result; + + return new ArrayList<>(nameToAttribute.values()); } /** @@ -160,7 +161,7 @@ public static List makeReference(List output) { return out; } - public static List makeNullable(List output) { + private static List makeNullable(List output) { List out = new ArrayList<>(output.size()); for (Attribute a : output) { out.add(a.withNullability(Nullability.TRUE)); @@ -181,6 +182,15 @@ public boolean resolved() { return childrenResolved() && expressionsResolved(); } + public Join withConfig(JoinConfig config) { + return new Join(source(), left(), right(), config); + } + + @Override + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new Join(source(), left, right, config); + } + @Override public String commandName() { return "JOIN"; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java index 68ad50f2f67a0..383606d6ccbed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinConfig.java @@ -22,12 +22,16 @@ * @param leftFields matched with the right fields * @param rightFields matched with the left fields */ +// TODO: this class needs refactoring into a more general form (expressions) since it's currently contains +// both the condition (equi-join) between the left and right field as well as the output of the join keys +// which makes sense only for USING clause - which is better resolved in the analyzer (based on the names) +// hence why for now the attributes are set inside the analyzer public record JoinConfig(JoinType type, List matchFields, List leftFields, List rightFields) implements Writeable { public JoinConfig(StreamInput in) throws IOException { 
this( - JoinType.readFrom(in), + JoinTypes.readFrom(in), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class), in.readNamedWriteableCollectionAsList(Attribute.class) @@ -43,6 +47,9 @@ public void writeTo(StreamOutput out) throws IOException { } public boolean expressionsResolved() { - return Resolvables.resolved(matchFields) && Resolvables.resolved(leftFields) && Resolvables.resolved(rightFields); + return type.resolved() + && Resolvables.resolved(matchFields) + && Resolvables.resolved(leftFields) + && Resolvables.resolved(rightFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java index c3095efc9e623..a309387b1f0a2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinType.java @@ -7,46 +7,15 @@ package org.elasticsearch.xpack.esql.plan.logical.join; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import java.io.IOException; +public interface JoinType extends Writeable { -public enum JoinType implements Writeable { - INNER(0, "INNER"), - LEFT(1, "LEFT OUTER"), - RIGHT(2, "RIGHT OUTER"), - FULL(3, "FULL OUTER"), - CROSS(4, "CROSS"); - - private final byte id; - private final String name; - - JoinType(int id, String name) { - this.id = (byte) id; - this.name = name; - } - - @Override - public String toString() { - return name; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeByte(id); + default String joinName() { + return getClass().getSimpleName(); } - public static JoinType readFrom(StreamInput in) throws IOException { - byte id = in.readByte(); - return switch (id) { - case 0 -> INNER; - case 1 -> LEFT; - case 2 -> RIGHT; - case 3 -> FULL; - case 4 -> CROSS; - default -> throw new IllegalArgumentException("unsupported join [" + id + "]"); - }; + default boolean resolved() { + return true; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java new file mode 100644 index 0000000000000..9d3471bc356f7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/JoinTypes.java @@ -0,0 +1,155 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.xpack.esql.core.capabilities.Resolvables; +import org.elasticsearch.xpack.esql.core.expression.Attribute; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Utility class defining the concrete types of joins supported by ESQL. 
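With JoinType turned into an interface, the concrete core types (defined in JoinTypes below) still travel over the wire as a single byte id, and JoinTypes.readFrom resolves that id back through a static lookup map. A hedged round-trip sketch using the real Elasticsearch stream classes; the helper itself is illustrative:

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;

import java.io.IOException;

class JoinTypeWireSketch {
    static JoinType roundTrip(JoinType type) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            type.writeTo(out);                     // writes the single byte id
            try (StreamInput in = out.bytes().streamInput()) {
                return JoinTypes.readFrom(in);     // resolves the id via the static map
            }
        }
    }

    public static void main(String[] args) throws IOException {
        // Core types are singletons, so identity comparison holds after the trip.
        assert roundTrip(JoinTypes.LEFT) == JoinTypes.LEFT;
    }
}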
+ */ +public class JoinTypes { + + private JoinTypes() {} + + public static JoinType INNER = CoreJoinType.INNER; + public static JoinType LEFT = CoreJoinType.LEFT; + public static JoinType RIGHT = CoreJoinType.RIGHT; + public static JoinType FULL = CoreJoinType.FULL; + public static JoinType CROSS = CoreJoinType.CROSS; + + private static Map JOIN_TYPES; + + static { + CoreJoinType[] types = CoreJoinType.values(); + JOIN_TYPES = Maps.newMapWithExpectedSize(types.length); + for (CoreJoinType type : types) { + JOIN_TYPES.put(type.id, type); + } + } + + /** + * The predefined core join types. Implements as enum for easy comparison and serialization. + */ + private enum CoreJoinType implements JoinType { + INNER(1, "INNER"), + LEFT(2, "LEFT OUTER"), + RIGHT(3, "RIGHT OUTER"), + FULL(4, "FULL OUTER"), + CROSS(5, "CROSS"); + + private final String name; + private final byte id; + + CoreJoinType(int id, String name) { + this.id = (byte) id; + this.name = name; + } + + @Override + public String joinName() { + return name; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByte(id); + } + } + + /** + * Join type for the USING clause - shorthand for defining an equi-join (equality join meaning the condition checks if columns across + * each side of the join are equal). + * One important difference is that the USING clause returns the join column only once, at the beginning of the result set. + */ + public static class UsingJoinType implements JoinType { + private final List columns; + private final JoinType coreJoin; + + public UsingJoinType(JoinType coreJoin, List columns) { + this.columns = columns; + this.coreJoin = coreJoin; + } + + @Override + public String joinName() { + return coreJoin.joinName() + " USING " + columns.toString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new IllegalArgumentException("USING join type should not be serialized due to being rewritten"); + } + + public JoinType coreJoin() { + return coreJoin; + } + + public List columns() { + return columns; + } + + @Override + public boolean resolved() { + return Resolvables.resolved(columns); + } + + @Override + public int hashCode() { + return Objects.hash(columns, coreJoin); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UsingJoinType that = (UsingJoinType) o; + return Objects.equals(columns, that.columns) && coreJoin == that.coreJoin; + } + + @Override + public String toString() { + return joinName(); + } + } + + /** + * Private class so it doesn't get used yet it is defined to showcase why the join type was defined as an interface instead of a simpler + * enum. 
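UsingJoinType above models the USING shorthand for an equi-join and deliberately refuses serialization, since it is rewritten away before hitting the wire. A sketch of the parser-side construction; the column name language_code is a made-up example:

import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;

import java.util.List;

class UsingJoinSketch {
    static JoinType leftUsing() {
        // Columns start unresolved; UsingJoinType.resolved() reports true only
        // once the analyzer has resolved them.
        List<Attribute> columns = List.of(new UnresolvedAttribute(Source.EMPTY, "language_code"));
        // joinName() renders roughly as "LEFT OUTER USING [language_code]".
        return new JoinTypes.UsingJoinType(JoinTypes.LEFT, columns);
    }
}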
+ */ + private abstract static class NaturalJoinType implements JoinType { + + private final JoinType joinType; + + private NaturalJoinType(JoinType joinType) { + this.joinType = joinType; + } + + @Override + public String joinName() { + return "NATURAL " + joinType.joinName(); + } + } + + public static JoinType readFrom(StreamInput in) throws IOException { + byte id = in.readByte(); + JoinType type = JOIN_TYPES.get(id); + if (type == null) { + throw new IllegalArgumentException("unsupported join [" + id + "]"); + } + ; + return type; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java new file mode 100644 index 0000000000000..2ee9213f45b36 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/join/LookupJoin.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plan.logical.join; + +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.Project; +import org.elasticsearch.xpack.esql.plan.logical.SurrogateLogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.UsingJoinType; + +import java.util.List; +import java.util.Objects; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes.LEFT; + +/** + * Lookup join - specialized LEFT (OUTER) JOIN between the main left side and a lookup index (index_mode = lookup) on the right. + */ +public class LookupJoin extends Join implements SurrogateLogicalPlan { + + private final List output; + + public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, List joinFields) { + this(source, left, right, new UsingJoinType(LEFT, joinFields), emptyList(), emptyList(), emptyList(), emptyList()); + } + + public LookupJoin( + Source source, + LogicalPlan left, + LogicalPlan right, + JoinType type, + List joinFields, + List leftFields, + List rightFields, + List output + ) { + this(source, left, right, new JoinConfig(type, joinFields, leftFields, rightFields), output); + } + + public LookupJoin(Source source, LogicalPlan left, LogicalPlan right, JoinConfig joinConfig, List output) { + super(source, left, right, joinConfig); + this.output = output; + } + + /** + * Translate the expression into a regular join with a Projection on top, to deal with serialization & co. 
+ */ + @Override + public LogicalPlan surrogate() { + JoinConfig cfg = config(); + JoinConfig newConfig = new JoinConfig(LEFT, cfg.matchFields(), cfg.leftFields(), cfg.rightFields()); + Join normalized = new Join(source(), left(), right(), newConfig); + // TODO: decide whether to introduce USING or just basic ON semantics - keep the ordering out for now + return new Project(source(), normalized, output); + } + + public List output() { + return output; + } + + @Override + public Join replaceChildren(LogicalPlan left, LogicalPlan right) { + return new LookupJoin(source(), left, right, config(), output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create( + this, + LookupJoin::new, + left(), + right(), + config().type(), + config().matchFields(), + config().leftFields(), + config().rightFields(), + output + ); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), output); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj) == false) { + return false; + } + + LookupJoin other = (LookupJoin) obj; + return Objects.equals(output, other.output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java new file mode 100644 index 0000000000000..e01451ceaecac --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/LookupJoinExec.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
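surrogate() above normalizes a LookupJoin into a plain LEFT Join wrapped in a Project, so the serializable form carries no USING information. A condensed sketch of that rewrite; the plan and attribute inputs are assumed to be already built:

import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.esql.plan.logical.Project;
import org.elasticsearch.xpack.esql.plan.logical.join.Join;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;

import java.util.List;

class SurrogateSketch {
    static LogicalPlan toRegularJoin(Source src, LogicalPlan left, LogicalPlan right,
                                     JoinConfig cfg, List<Attribute> output) {
        // Re-key the config on the plain LEFT type so it can be serialized,
        // then project to the exact output the lookup join promised.
        JoinConfig serializable = new JoinConfig(JoinTypes.LEFT, cfg.matchFields(), cfg.leftFields(), cfg.rightFields());
        return new Project(src, new Join(src, left, right, serializable), output);
    }
}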
+ */ + +package org.elasticsearch.xpack.esql.plan.physical; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; + +public class LookupJoinExec extends BinaryExec implements EstimatesRowSize { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + PhysicalPlan.class, + "LookupJoinExec", + LookupJoinExec::new + ); + + private final List matchFields; + private final List leftFields; + private final List rightFields; + private final List output; + private List lazyAddedFields; + + public LookupJoinExec( + Source source, + PhysicalPlan left, + PhysicalPlan lookup, + List matchFields, + List leftFields, + List rightFields, + List output + ) { + super(source, left, lookup); + this.matchFields = matchFields; + this.leftFields = leftFields; + this.rightFields = rightFields; + this.output = output; + } + + private LookupJoinExec(StreamInput in) throws IOException { + super(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readNamedWriteable(PhysicalPlan.class)); + this.matchFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class); + this.output = in.readNamedWriteableCollectionAsList(Attribute.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeNamedWriteableCollection(matchFields); + out.writeNamedWriteableCollection(leftFields); + out.writeNamedWriteableCollection(rightFields); + out.writeNamedWriteableCollection(output); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + public PhysicalPlan lookup() { + return right(); + } + + public List matchFields() { + return matchFields; + } + + public List leftFields() { + return leftFields; + } + + public List rightFields() { + return rightFields; + } + + public List addedFields() { + if (lazyAddedFields == null) { + AttributeSet set = outputSet(); + set.removeAll(left().output()); + for (Attribute m : matchFields) { + set.removeIf(a -> a.name().equals(m.name())); + } + lazyAddedFields = new ArrayList<>(set); + lazyAddedFields.sort(Comparator.comparing(Attribute::name)); + } + return lazyAddedFields; + } + + @Override + public PhysicalPlan estimateRowSize(State state) { + state.add(false, output); + return this; + } + + @Override + public List output() { + return output; + } + + @Override + public AttributeSet inputSet() { + // TODO: this is a hack since the right side is always materialized - instead this should + // return the _doc so the extraction can happen lazily + return left().outputSet(); + } + + @Override + protected AttributeSet computeReferences() { + // TODO: same as above - once lazy materialization of both sides lands, this needs updating + return 
Expressions.references(leftFields); + } + + @Override + public LookupJoinExec replaceChildren(PhysicalPlan left, PhysicalPlan right) { + return new LookupJoinExec(source(), left, right, matchFields, leftFields, rightFields, output); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LookupJoinExec::new, left(), right(), matchFields, leftFields, rightFields, output); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (super.equals(o) == false) { + return false; + } + LookupJoinExec hash = (LookupJoinExec) o; + return matchFields.equals(hash.matchFields) + && leftFields.equals(hash.leftFields) + && rightFields.equals(hash.rightFields) + && output.equals(hash.output); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), matchFields, leftFields, rightFields, output); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java index ecf78908d6d3e..d2935ccb75b66 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/PhysicalPlan.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.plan.physical; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.QueryPlan; @@ -20,34 +19,6 @@ * PhysicalPlan = take Delta, DEN to SJC, then SJC to SFO */ public abstract class PhysicalPlan extends QueryPlan { - public static List getNamedWriteables() { - return List.of( - AggregateExec.ENTRY, - DissectExec.ENTRY, - EnrichExec.ENTRY, - EsQueryExec.ENTRY, - EsSourceExec.ENTRY, - EvalExec.ENTRY, - ExchangeExec.ENTRY, - ExchangeSinkExec.ENTRY, - ExchangeSourceExec.ENTRY, - FieldExtractExec.ENTRY, - FilterExec.ENTRY, - FragmentExec.ENTRY, - GrokExec.ENTRY, - HashJoinExec.ENTRY, - LimitExec.ENTRY, - LocalSourceExec.ENTRY, - MvExpandExec.ENTRY, - OrderExec.ENTRY, - ProjectExec.ENTRY, - RowExec.ENTRY, - ShowExec.ENTRY, - SubqueryExec.ENTRY, - TopNExec.ENTRY - ); - } - public PhysicalPlan(Source source, List children) { super(source, children); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 0d0b8dda5fc74..1096c917fed4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -47,6 +47,7 @@ import org.elasticsearch.compute.operator.topn.TopNOperator.TopNOperatorFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.tasks.CancellableTask; @@ -63,6 +64,8 @@ import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; +import org.elasticsearch.xpack.esql.enrich.LookupFromIndexOperator; +import 
org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import org.elasticsearch.xpack.esql.expression.Order; @@ -81,6 +84,7 @@ import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -125,6 +129,7 @@ public class LocalExecutionPlanner { private final ExchangeSourceHandler exchangeSourceHandler; private final ExchangeSinkHandler exchangeSinkHandler; private final EnrichLookupService enrichLookupService; + private final LookupFromIndexService lookupFromIndexService; private final PhysicalOperationProviders physicalOperationProviders; public LocalExecutionPlanner( @@ -138,6 +143,7 @@ public LocalExecutionPlanner( ExchangeSourceHandler exchangeSourceHandler, ExchangeSinkHandler exchangeSinkHandler, EnrichLookupService enrichLookupService, + LookupFromIndexService lookupFromIndexService, PhysicalOperationProviders physicalOperationProviders ) { this.sessionId = sessionId; @@ -149,6 +155,7 @@ public LocalExecutionPlanner( this.exchangeSourceHandler = exchangeSourceHandler; this.exchangeSinkHandler = exchangeSinkHandler; this.enrichLookupService = enrichLookupService; + this.lookupFromIndexService = lookupFromIndexService; this.physicalOperationProviders = physicalOperationProviders; this.configuration = configuration; } @@ -225,8 +232,10 @@ else if (node instanceof EsQueryExec esQuery) { // lookups and joins else if (node instanceof EnrichExec enrich) { return planEnrich(enrich, context); - } else if (node instanceof HashJoinExec lookup) { - return planHashJoin(lookup, context); + } else if (node instanceof HashJoinExec join) { + return planHashJoin(join, context); + } else if (node instanceof LookupJoinExec join) { + return planLookupJoin(join, context); } // output else if (node instanceof OutputExec outputExec) { @@ -353,11 +362,10 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case VERSION -> TopNEncoder.VERSION; case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION, OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE; - case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE -> + case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; - case SOURCE -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); }; } List orders = topNExec.order().stream().map(order -> { @@ -489,7 +497,8 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon enrichIndex, enrich.matchType(), enrich.policyMatchField(), - enrich.enrichFields() + enrich.enrichFields(), + enrich.source() ), layout ); @@ -558,6 +567,55 @@ private PhysicalOperation 
planHashJoin(HashJoinExec join, LocalExecutionPlannerC return source.with(new ProjectOperatorFactory(projection), layout); } + private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlannerContext context) { + PhysicalOperation source = plan(join.left(), context); + Layout.Builder layoutBuilder = source.layout.builder(); + for (Attribute f : join.addedFields()) { + layoutBuilder.append(f); + } + Layout layout = layoutBuilder.build(); + + // TODO: this works when the join happens on the coordinator + /* + * But when it happens on the data node we get a + * \_FieldExtractExec[language_code{f}#15, language_name{f}#16]<[]> + * \_EsQueryExec[languages_lookup], indexMode[lookup], query[][_doc{f}#18], limit[], sort[] estimatedRowSize[62] + * Which we'd prefer not to do - at least for now. We already know the fields we're loading + * and don't want any local planning. + */ + EsQueryExec localSourceExec = (EsQueryExec) join.lookup(); + if (localSourceExec.indexMode() != IndexMode.LOOKUP) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + List matchFields = new ArrayList<>(join.matchFields().size()); + for (Attribute m : join.matchFields()) { + Layout.ChannelAndType t = source.layout.get(m.id()); + if (t == null) { + throw new IllegalArgumentException("can't plan [" + join + "][" + m + "]"); + } + matchFields.add(t); + } + if (matchFields.size() != 1) { + throw new IllegalArgumentException("can't plan [" + join + "]"); + } + + return source.with( + new LookupFromIndexOperator.Factory( + sessionId, + parentTask, + context.queryPragmas().enrichMaxWorkers(), + matchFields.getFirst().channel(), + lookupFromIndexService, + matchFields.getFirst().type(), + localSourceExec.index().name(), + join.matchFields().getFirst().name(), + join.addedFields().stream().map(f -> (NamedExpression) f).toList(), + join.source() + ), + layout + ); + } + private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java index ceffae704cff0..fc52f2d5a9d23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/LocalMapper.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner.mapper; import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; @@ -21,11 +22,12 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec; import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; +import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; 
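The mapper changes below route a LEFT join by the shape of its right-hand side: local data becomes a HashJoinExec, a lookup-mode index becomes the new LookupJoinExec, and anything else is rejected. A condensed, illustrative sketch of that dispatch (the real logic lives in LocalMapper and Mapper):

import org.elasticsearch.index.IndexMode;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
import org.elasticsearch.xpack.esql.plan.logical.join.Join;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig;
import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;
import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec;
import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec;
import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec;
import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec;
import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;

class JoinDispatchSketch {
    static PhysicalPlan map(Join join, PhysicalPlan left, PhysicalPlan right) {
        JoinConfig config = join.config();
        if (config.type() != JoinTypes.LEFT) {
            throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]");
        }
        if (right instanceof LocalSourceExec localData) {
            // in-memory right side (e.g. LOOKUP tables): hash join
            return new HashJoinExec(join.source(), left, localData,
                config.matchFields(), config.leftFields(), config.rightFields(), join.output());
        }
        if (right instanceof EsSourceExec source && source.indexMode() == IndexMode.LOOKUP) {
            // lookup index on the right: dedicated lookup join operator
            return new LookupJoinExec(join.source(), left, right,
                config.matchFields(), config.leftFields(), config.rightFields(), join.output());
        }
        throw new EsqlIllegalArgumentException("unsupported join [" + join + "]");
    }
}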
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
@@ -98,26 +100,36 @@ private PhysicalPlan mapBinary(BinaryPlan binary) {
         // special handling for inlinejoin - join + subquery which has to be executed first (async) and replaced by its result
         if (binary instanceof Join join) {
             JoinConfig config = join.config();
-            if (config.type() != JoinType.LEFT) {
+            if (config.type() != JoinTypes.LEFT) {
                 throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]");
             }
             PhysicalPlan left = map(binary.left());
             PhysicalPlan right = map(binary.right());
-            if (right instanceof LocalSourceExec == false) {
-                throw new EsqlIllegalArgumentException("right side of a join must be a local source");
+            // if the right is data we can use a hash join directly
+            if (right instanceof LocalSourceExec localData) {
+                return new HashJoinExec(
+                    join.source(),
+                    left,
+                    localData,
+                    config.matchFields(),
+                    config.leftFields(),
+                    config.rightFields(),
+                    join.output()
+                );
+            }
+            if (right instanceof EsSourceExec source && source.indexMode() == IndexMode.LOOKUP) {
+                return new LookupJoinExec(
+                    join.source(),
+                    left,
+                    right,
+                    config.matchFields(),
+                    config.leftFields(),
+                    config.rightFields(),
+                    join.output()
+                );
             }
-
-            return new HashJoinExec(
-                join.source(),
-                left,
-                right,
-                config.matchFields(),
-                config.leftFields(),
-                config.rightFields(),
-                join.output()
-            );
         }
         return MapperUtils.unsupported(binary);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java
index b717af650b7a6..23e6f4fb91d18 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/Mapper.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.esql.planner.mapper;
 import org.elasticsearch.compute.aggregation.AggregatorMode;
+import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.util.Holder;
@@ -23,13 +24,14 @@
 import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.join.Join;
 import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig;
-import org.elasticsearch.xpack.esql.plan.logical.join.JoinType;
+import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes;
 import org.elasticsearch.xpack.esql.plan.physical.EnrichExec;
 import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec;
 import org.elasticsearch.xpack.esql.plan.physical.FragmentExec;
 import org.elasticsearch.xpack.esql.plan.physical.HashJoinExec;
 import org.elasticsearch.xpack.esql.plan.physical.LimitExec;
 import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec;
+import org.elasticsearch.xpack.esql.plan.physical.LookupJoinExec;
 import org.elasticsearch.xpack.esql.plan.physical.OrderExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
@@ -178,7 +180,7 @@ private PhysicalPlan mapUnary(UnaryPlan unary) {
     private PhysicalPlan mapBinary(BinaryPlan bp) {
         if (bp instanceof Join join) {
             JoinConfig config = join.config();
-            if (config.type() != JoinType.LEFT) {
+            if (config.type() != JoinTypes.LEFT) {
                 throw new EsqlIllegalArgumentException("unsupported join type [" + config.type() + "]");
             }
@@ -190,7 +192,7 @@ private PhysicalPlan mapBinary(BinaryPlan bp) {
             }
             PhysicalPlan right = map(bp.right());
-            // no fragment means lookup
+            // if the right is data we can use a hash join directly
             if (right instanceof LocalSourceExec localData) {
                 return new HashJoinExec(
                     join.source(),
@@ -202,6 +204,19 @@ private PhysicalPlan mapBinary(BinaryPlan bp) {
             }
+            if (right instanceof FragmentExec fragment
+                && fragment.fragment() instanceof EsRelation relation
+                && relation.indexMode() == IndexMode.LOOKUP) {
+                return new LookupJoinExec(
+                    join.source(),
+                    left,
+                    right,
+                    config.matchFields(),
+                    config.leftFields(),
+                    config.rightFields(),
+                    join.output()
+                );
+            }
         }
         return MapperUtils.unsupported(bp);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java
index 213e33f3712b1..ea21943aced9b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java
@@ -9,7 +9,10 @@
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.compute.aggregation.AggregatorMode;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockUtils;
 import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
+import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -27,6 +30,7 @@
 import org.elasticsearch.xpack.esql.plan.logical.Row;
 import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan;
 import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation;
+import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier;
 import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo;
 import org.elasticsearch.xpack.esql.plan.physical.AggregateExec;
 import org.elasticsearch.xpack.esql.plan.physical.DissectExec;
@@ -39,10 +43,11 @@
 import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.ProjectExec;
-import org.elasticsearch.xpack.esql.plan.physical.RowExec;
 import org.elasticsearch.xpack.esql.plan.physical.ShowExec;
 import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders;
+import org.elasticsearch.xpack.esql.planner.PlannerUtils;
+import java.util.ArrayList;
 import java.util.List;
 /**
@@ -53,7 +58,15 @@ private MapperUtils() {}
     static PhysicalPlan mapLeaf(LeafPlan p) {
         if (p instanceof Row row) {
-            return new RowExec(row.source(), row.fields());
+            // return new RowExec(row.source(), row.fields());
+            // convert row into local relation
+            List<Alias> fields = row.fields();
+            List<Object> values = new ArrayList<>(fields.size());
+            for (Alias field : fields) {
+                values.add(field.child().fold());
+            }
+            Block[] blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values);
+            p = new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks));
         }
         if (p instanceof LocalRelation local) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
index 76de337ded5c6..fc4c057e52ab6 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java
@@ -61,6 +61,7 @@
 import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupService;
+import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService;
 import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec;
 import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec;
 import org.elasticsearch.xpack.esql.plan.physical.FragmentExec;
@@ -98,6 +99,7 @@ public class ComputeService {
     private final DriverTaskRunner driverRunner;
     private final ExchangeService exchangeService;
     private final EnrichLookupService enrichLookupService;
+    private final LookupFromIndexService lookupFromIndexService;
     private final ClusterService clusterService;
     public ComputeService(
@@ -105,6 +107,7 @@ public ComputeService(
         TransportService transportService,
         ExchangeService exchangeService,
         EnrichLookupService enrichLookupService,
+        LookupFromIndexService lookupFromIndexService,
         ClusterService clusterService,
         ThreadPool threadPool,
         BigArrays bigArrays,
@@ -125,6 +128,7 @@ public ComputeService(
         this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor);
         this.exchangeService = exchangeService;
         this.enrichLookupService = enrichLookupService;
+        this.lookupFromIndexService = lookupFromIndexService;
         this.clusterService = clusterService;
     }
@@ -429,6 +433,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan,
             context.exchangeSource(),
             context.exchangeSink(),
             enrichLookupService,
+            lookupFromIndexService,
             new EsPhysicalOperationProviders(contexts)
         );
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index e9b9f571e880e..67948fe717f2f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -21,8 +21,8 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.FeatureFlag;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
-import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BlockWritables;
 import org.elasticsearch.compute.lucene.LuceneOperator;
 import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator;
 import org.elasticsearch.compute.operator.AbstractPageMappingOperator;
@@ -38,6 +38,7 @@
 import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator;
 import org.elasticsearch.compute.operator.topn.TopNOperatorStatus;
 import org.elasticsearch.features.NodeFeature;
+import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.rest.RestController;
@@ -45,6 +46,7 @@
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.FixedExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.XPackPlugin;
 import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
 import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
 import org.elasticsearch.xpack.esql.EsqlInfoTransportAction;
@@ -58,17 +60,10 @@
 import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction;
 import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction;
 import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction;
-import org.elasticsearch.xpack.esql.core.expression.Attribute;
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupOperator;
 import org.elasticsearch.xpack.esql.execution.PlanExecutor;
-import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
-import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
-import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
-import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
-import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
+import org.elasticsearch.xpack.esql.expression.ExpressionWritables;
+import org.elasticsearch.xpack.esql.plan.PlanWritables;
 import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery;
 import org.elasticsearch.xpack.esql.session.IndexResolver;
@@ -116,7 +111,7 @@ public Collection<?> createComponents(PluginServices services) {
         BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize);
         setupSharedSecrets();
         return List.of(
-            new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry()),
+            new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry(), getLicenseState()),
             new ExchangeService(services.clusterService().getSettings(), services.threadPool(), ThreadPool.Names.SEARCH, blockFactory),
             blockFactory
         );
@@ -131,6 +126,11 @@ private void setupSharedSecrets() {
         }
     }
+    // to be overridden by tests
+    protected XPackLicenseState getLicenseState() {
+        return XPackPlugin.getSharedLicenseState();
+    }
+
     /**
      * The settings defined by the ESQL plugin.
      *
@@ -192,18 +192,10 @@ public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
         entries.add(SingleValueQuery.ENTRY);
         entries.add(AsyncOperator.Status.ENTRY);
         entries.add(EnrichLookupOperator.Status.ENTRY);
-        entries.addAll(Block.getNamedWriteables());
-        entries.addAll(Attribute.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project
-        entries.addAll(NamedExpression.getNamedWriteables());
-        entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); // TODO combine with above once these are in the same project
-        entries.addAll(Expression.getNamedWriteables());
-        entries.add(UnsupportedAttribute.EXPRESSION_ENTRY); // TODO combine with above once these are in the same project
-        entries.addAll(EsqlScalarFunction.getNamedWriteables());
-        entries.addAll(AggregateFunction.getNamedWriteables());
-        entries.addAll(LogicalPlan.getNamedWriteables());
-        entries.addAll(PhysicalPlan.getNamedWriteables());
-        entries.addAll(FullTextFunction.getNamedWriteables());
+
+        entries.addAll(BlockWritables.getNamedWriteables());
+        entries.addAll(ExpressionWritables.getNamedWriteables());
+        entries.addAll(PlanWritables.getNamedWriteables());
         return entries;
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
index 04e5fdc4b3bd2..fdc6e06a11032 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
@@ -101,6 +101,7 @@ public TransportEsqlQueryAction(
             transportService,
             exchangeService,
             enrichLookupService,
+            lookupFromIndexService,
             clusterService,
             threadPool,
             bigArrays,
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index c576d15f92608..9630a520e8654 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -292,10 +292,10 @@ private void preAnalyze(
         var unresolvedPolicies = preAnalysis.enriches.stream()
             .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode()))
             .collect(Collectors.toSet());
+        final List<TableInfo> indices = preAnalysis.indices;
+        // TODO: make a separate call for lookup indices
         final Set<String> targetClusters = enrichPolicyResolver.groupIndicesPerCluster(
-            preAnalysis.indices.stream()
-                .flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index())))
-                .toArray(String[]::new)
+            indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new)
         ).keySet();
         enrichPolicyResolver.resolvePolicies(targetClusters, unresolvedPolicies, listener.delegateFailureAndWrap((l, enrichResolution) -> {
             // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index 348ca4acd100e..ff0c0d5a5d14c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -58,6 +58,7 @@
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.enrich.EnrichLookupService;
+import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService;
 import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy;
 import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry;
 import org.elasticsearch.xpack.esql.index.EsIndex;
@@ -253,7 +254,10 @@ public final void test() throws Throwable {
             "can't use MATCH function in csv tests",
             testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_FUNCTION.capabilityName())
         );
-
+        assumeFalse(
+            "lookup join disabled for csv tests",
+            testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP.capabilityName())
+        );
         if (Build.current().isSnapshot()) {
             assertThat(
                 "Capability is not included in the enabled list capabilities on a snapshot build. Spelling mistake?",
@@ -542,6 +546,7 @@ void executeSubPlan(
             exchangeSource,
             exchangeSink,
             Mockito.mock(EnrichLookupService.class),
+            Mockito.mock(LookupFromIndexService.class),
             physicalOperationProviders
         );
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java
index 0a9e8b1b90681..9c24ec96dddf8 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java
@@ -13,7 +13,7 @@
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockWritables;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.ExistsQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -24,15 +24,11 @@
 import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.query.WildcardQueryBuilder;
 import org.elasticsearch.test.EqualsHashCodeTestUtils;
-import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
-import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
-import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
-import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
+import org.elasticsearch.xpack.esql.expression.ExpressionWritables;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import org.elasticsearch.xpack.esql.plan.PlanWritables;
 import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery;
@@ -115,17 +111,9 @@ public static NamedWriteableRegistry writableRegistry() {
         entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, RegexpQueryBuilder.NAME, RegexpQueryBuilder::new));
         entries.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, ExistsQueryBuilder.NAME, ExistsQueryBuilder::new));
         entries.add(SingleValueQuery.ENTRY);
-        entries.addAll(Attribute.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY);
-        entries.addAll(NamedExpression.getNamedWriteables());
-        entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY);
-        entries.addAll(Expression.getNamedWriteables());
-        entries.addAll(EsqlScalarFunction.getNamedWriteables());
-        entries.addAll(AggregateFunction.getNamedWriteables());
-        entries.addAll(Block.getNamedWriteables());
-        entries.addAll(LogicalPlan.getNamedWriteables());
-        entries.addAll(PhysicalPlan.getNamedWriteables());
-        entries.addAll(FullTextFunction.getNamedWriteables());
+        entries.addAll(ExpressionWritables.getNamedWriteables());
+        entries.addAll(PlanWritables.getNamedWriteables());
+        entries.addAll(BlockWritables.getNamedWriteables());
         return new NamedWriteableRegistry(entries);
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java
index 2f3aa09868637..134981d3c3b0c 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseProfileTests.java
@@ -9,7 +9,7 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockWritables;
 import org.elasticsearch.compute.operator.AbstractPageMappingOperator;
 import org.elasticsearch.compute.operator.DriverProfile;
 import org.elasticsearch.compute.operator.DriverSleeps;
@@ -39,7 +39,7 @@ protected EsqlQueryResponse.Profile mutateInstance(EsqlQueryResponse.Profile ins
     @Override
     protected NamedWriteableRegistry getNamedWriteableRegistry() {
         return new NamedWriteableRegistry(
-            Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList()
+            Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), BlockWritables.getNamedWriteables().stream()).toList()
         );
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
index 4aaf4f6cccf0f..35364089127cc 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BlockUtils;
+import org.elasticsearch.compute.data.BlockWritables;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.DoubleBlock;
@@ -99,7 +100,7 @@ public void blockFactoryEmpty() {
     @Override
     protected NamedWriteableRegistry getNamedWriteableRegistry() {
         return new NamedWriteableRegistry(
-            Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), Block.getNamedWriteables().stream()).toList()
+            Stream.concat(Stream.of(AbstractPageMappingOperator.Status.ENTRY), BlockWritables.getNamedWriteables().stream()).toList()
         );
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index c1b2adddfc838..2770ed1f336ae 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -1909,11 +1909,11 @@ public void testLookup() {
         String query = """
             FROM test
             | RENAME languages AS int
-            | LOOKUP int_number_names ON int
+            | LOOKUP_🐔 int_number_names ON int
             """;
         if (Build.current().isSnapshot() == false) {
             var e = expectThrows(ParsingException.class, () -> analyze(query));
-            assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {"));
+            assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {"));
             return;
         }
         LogicalPlan plan = analyze(query);
@@ -1945,18 +1945,14 @@ public void testLookup() {
             .item(startsWith("job{f}"))
             .item(startsWith("job.raw{f}"))
             /*
-             * Int is a reference here because we renamed it in project.
-             * If we hadn't it'd be a field and that'd be fine.
+             * Int key is returned as a full field (despite the rename)
              */
-            .item(containsString("int{r}"))
+            .item(containsString("int{f}"))
             .item(startsWith("last_name{f}"))
             .item(startsWith("long_noidx{f}"))
             .item(startsWith("salary{f}"))
             /*
-             * It's important that name is returned as a *reference* here
-             * instead of a field. If it were a field we'd use SearchStats
-             * on it and discover that it doesn't exist in the index. It doesn't!
-             * We don't expect it to. It exists only in the lookup table.
+             * As is the name column from the right side.
             */
            .item(containsString("name{f}"))
        );
@@ -1965,11 +1961,11 @@ public void testLookupMissingField() {
         String query = """
             FROM test
-            | LOOKUP int_number_names ON garbage
+            | LOOKUP_🐔 int_number_names ON garbage
             """;
         if (Build.current().isSnapshot() == false) {
             var e = expectThrows(ParsingException.class, () -> analyze(query));
-            assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {"));
+            assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {"));
             return;
         }
         var e = expectThrows(VerificationException.class, () -> analyze(query));
@@ -1979,11 +1975,11 @@ public void testLookupMissingTable() {
         String query = """
             FROM test
-            | LOOKUP garbage ON a
+            | LOOKUP_🐔 garbage ON a
             """;
         if (Build.current().isSnapshot() == false) {
             var e = expectThrows(ParsingException.class, () -> analyze(query));
-            assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP' expecting {"));
+            assertThat(e.getMessage(), containsString("line 2:3: mismatched input 'LOOKUP_🐔' expecting {"));
             return;
         }
         var e = expectThrows(VerificationException.class, () -> analyze(query));
@@ -1994,11 +1990,11 @@ public void testLookupMatchTypeWrong() {
         String query = """
             FROM test
             | RENAME last_name AS int
-            | LOOKUP int_number_names ON int
+            | LOOKUP_🐔 int_number_names ON int
             """;
         if (Build.current().isSnapshot() == false) {
             var e = expectThrows(ParsingException.class, () -> analyze(query));
-            assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {"));
+            assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {"));
             return;
         }
         var e = expectThrows(VerificationException.class, () -> analyze(query));
@@ -2321,8 +2317,6 @@
 public void testInvalidNamedParamsForIdentifierPatterns() {
 }
 public void testFromEnrichAndMatchColonUsage() {
-    assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-
     LogicalPlan plan = analyze("""
         from *:test
         | EVAL x = to_string(languages)
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
index 8b364a603405c..8da6863465d39 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
@@ -195,13 +195,13 @@ public void testUnsupportedAndMultiTypedFields() {
         if (EsqlCapabilities.Cap.LOOKUP_V4.isEnabled()) {
             // LOOKUP with unsupported type
             assertEquals(
-                "1:41: column type mismatch, table column was [integer] and original column was [unsupported]",
-                error("from test* | lookup int_number_names on int", analyzer)
+                "1:43: column type mismatch, table column was [integer] and original column was [unsupported]",
+                error("from test* | lookup_🐔 int_number_names on int", analyzer)
             );
             // LOOKUP with multi-typed field
             assertEquals(
-                "1:44: column type mismatch, table column was [double] and original column was [unsupported]",
-                error("from test* | lookup double_number_names on double", analyzer)
+                "1:46: column type mismatch, table column was [double] and original column was [unsupported]",
+                error("from test* | lookup_🐔 double_number_names on double", analyzer)
             );
         }
@@ -404,6 +404,11 @@ public void testAggFilterOnBucketingOrAggFunctions() {
         query("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by bucket(salary, 10)");
         // but fails if it's different
+        assertEquals(
+            "1:32: can only use grouping function [bucket(a, 3)] part of the BY clause",
+            error("row a = 1 | stats sum(a) where bucket(a, 3) > -1 by bucket(a,2)")
+        );
+
         assertEquals(
             "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause",
             error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by emp_no")
@@ -771,40 +776,40 @@ public void testWrongInputParam() {
     public void testPeriodAndDurationInRowAssignment() {
         for (var unit : TIME_DURATIONS) {
-            assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit));
+            assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit));
             assertEquals(
-                "1:5: cannot use [1 " + unit + "::time_duration] directly in a row assignment",
+                "1:9: cannot use [1 " + unit + "::time_duration] directly in a row assignment",
                 error("row a = 1 " + unit + "::time_duration")
             );
             assertEquals(
-                "1:5: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment",
+                "1:9: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment",
                 error("row a = \"1 " + unit + "\"::time_duration")
             );
             assertEquals(
-                "1:5: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment",
+                "1:9: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment",
                 error("row a = to_timeduration(1 " + unit + ")")
            );
            assertEquals(
-                "1:5: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment",
+                "1:9: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment",
                error("row a = to_timeduration(\"1 " + unit + "\")")
            );
        }
        for (var unit : DATE_PERIODS) {
-            assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit));
+            assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit));
            assertEquals(
-                "1:5: cannot use [1 " + unit + "::date_period] directly in a row assignment",
+                "1:9: cannot use [1 " + unit + "::date_period] directly in a row assignment",
                error("row a = 1 " + unit + "::date_period")
            );
            assertEquals(
-                "1:5: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment",
+                "1:9: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment",
                error("row a = \"1 " + unit + "\"::date_period")
            );
            assertEquals(
-                "1:5: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment",
+                "1:9: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment",
                error("row a = to_dateperiod(1 " + unit + ")")
            );
            assertEquals(
-                "1:5: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment",
+                "1:9: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment",
                error("row a = to_dateperiod(\"1 " + unit + "\")")
            );
        }
@@ -1159,8 +1164,6 @@ public void testMatchInsideEval() throws Exception {
 }
 public void testMatchFilter() throws Exception {
-    assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-
     assertEquals(
         "1:19: first argument of [salary:\"100\"] must be [string], found value [salary] type [integer]",
         error("from test | where salary:\"100\"")
@@ -1190,7 +1193,6 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception {
 }
 public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception {
-    assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
     assertEquals(
         "1:24: [MATCH] function cannot be used after LIMIT",
         error("from test | limit 10 | where match(first_name, \"Anna\")")
@@ -1271,7 +1273,6 @@ public void testMatchFunctionOnlyAllowedInWhere() throws Exception {
 }
 public void testMatchOperatornOnlyAllowedInWhere() throws Exception {
-    assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
     checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator");
 }
@@ -1317,8 +1318,6 @@ public void testMatchFunctionWithDisjunctions() {
 }
 public void testMatchOperatorWithDisjunctions() {
-    assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
-
     checkWithDisjunctions(":", "first_name : \"Anna\"", "operator");
 }
@@ -1374,7 +1373,6 @@ public void testMatchFunctionWithNonBooleanFunctions() {
 }
 public void testMatchOperatorWithNonBooleanFunctions() {
-    assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
     checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator");
 }
@@ -1452,8 +1450,6 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception {
             "1:68: Unknown column [first_name]",
             error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")")
         );
-
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
         assertEquals(
             "1:62: Unknown column [first_name]",
             error("from test | stats max_salary = max(salary) by emp_no | where first_name : \"Anna\"")
@@ -1473,8 +1469,6 @@ public void testMatchFunctionNullArgs() throws Exception {
     public void testMatchTargetsExistingField() throws Exception {
         assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")"));
-
-        assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled());
         assertEquals("1:33: Unknown column [first_name]", error("from test | keep emp_no | where first_name : \"Anna\""));
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
index a2aa447c748e9..6dd0c5fe88afd 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
@@ -8,20 +8,11 @@
 package org.elasticsearch.xpack.esql.expression;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.tree.Node;
 import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests;
-import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
-import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction;
-import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction;
-import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction;
 import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests;
-import java.util.ArrayList;
-import java.util.List;
-
 public abstract class AbstractExpressionSerializationTests<T extends Expression> extends AbstractNodeSerializationTests<T> {
     public static Expression randomChild() {
         return ReferenceAttributeTests.randomReferenceAttribute(false);
@@ -29,17 +20,7 @@ public static Expression randomChild() {
     @Override
     protected final NamedWriteableRegistry getNamedWriteableRegistry() {
-        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(NamedExpression.getNamedWriteables());
-        entries.addAll(Expression.getNamedWriteables());
-        entries.addAll(Attribute.getNamedWriteables());
-        entries.addAll(EsqlScalarFunction.getNamedWriteables());
-        entries.addAll(AggregateFunction.getNamedWriteables());
-        entries.addAll(FullTextFunction.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY);
-        entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY);
-        entries.add(UnsupportedAttribute.EXPRESSION_ENTRY);
-        entries.add(org.elasticsearch.xpack.esql.expression.Order.ENTRY);
-        return new NamedWriteableRegistry(entries);
+        return new NamedWriteableRegistry(ExpressionWritables.getNamedWriteables());
     }
     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
index ccbed01994bf7..7bb8ab3e147e2 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AliasTests.java
@@ -11,23 +11,18 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.test.AbstractWireTestCase;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
-import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.NameId;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.tree.Source;
 import org.elasticsearch.xpack.esql.core.tree.SourceTests;
 import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests;
-import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.in;
 public class AliasTests extends AbstractWireTestCase<Alias> {
     public static Alias randomAlias() {
@@ -76,10 +71,6 @@ protected Alias copyInstance(Alias instance, TransportVersion version) throws IO
     @Override
     protected final NamedWriteableRegistry getNamedWriteableRegistry() {
-        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(NamedExpression.getNamedWriteables());
-        entries.addAll(Attribute.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY);
-        entries.addAll(Expression.getNamedWriteables());
-        return new NamedWriteableRegistry(entries);
+        return new NamedWriteableRegistry(ExpressionWritables.allExpressions());
     }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java
index a9750acdb1b84..d59e309790ad2 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAttributeTestCase.java
@@ -15,13 +15,12 @@
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.ExpressionWritables;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
 import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
 import org.elasticsearch.xpack.esql.session.Configuration;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Objects;
 import static org.hamcrest.Matchers.sameInstance;
@@ -52,9 +51,7 @@ protected final ExtraAttribute mutateInstance(ExtraAttribute instance) {
     @Override
     protected final NamedWriteableRegistry getNamedWriteableRegistry() {
-        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(Attribute.getNamedWriteables());
-        entries.add(UnsupportedAttribute.ENTRY);
-        return new NamedWriteableRegistry(entries);
+        return new NamedWriteableRegistry(ExpressionWritables.attributes());
     }
     @Override
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 181b8d52bf888..7802d74d2264f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.xpack.esql.core.util.NumericUtils;
 import org.elasticsearch.xpack.esql.core.util.StringUtils;
 import org.elasticsearch.xpack.esql.evaluator.EvalMapper;
+import org.elasticsearch.xpack.esql.expression.function.fulltext.Match;
 import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest;
 import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce;
 import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
@@ -130,7 +131,9 @@ public abstract class AbstractFunctionTestCase extends ESTestCase {
         entry("mod", Mod.class),
         entry("neg", Neg.class),
         entry("is_null", IsNull.class),
-        entry("is_not_null", IsNotNull.class)
+        entry("is_not_null", IsNotNull.class),
+        // Match operator is both a function and an operator
+        entry("match_operator", Match.class)
     );
     private static EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry().snapshotRegistry();
@@ -813,6 +816,10 @@ private static String buildSignatureSvg(String name) throws IOException {
         if (unaryOperator != null) {
             return RailRoadDiagram.unaryOperator(unaryOperator);
         }
+        String searchOperator = searchOperator(name);
+        if (searchOperator != null) {
+            return RailRoadDiagram.searchOperator(searchOperator);
+        }
         FunctionDefinition definition = definition(name);
         if (definition != null) {
             return RailRoadDiagram.functionSignature(definition);
@@ -862,7 +869,7 @@ public static void renderDocs() throws IOException {
             return;
         }
         String name = functionName();
-        if (binaryOperator(name) != null || unaryOperator(name) != null || likeOrInOperator(name)) {
+        if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) {
             renderDocsForOperators(name);
             return;
         }
@@ -1258,6 +1265,16 @@ private static String binaryOperator(String name) {
         };
     }
+    /**
+     * If this test is for a search operator return its symbol, otherwise return {@code null}.
+     */
+    private static String searchOperator(String name) {
+        return switch (name) {
+            case "match_operator" -> ":";
+            default -> null;
+        };
+    }
+
     /**
      * If this test is for a unary operator return its symbol, otherwise return {@code null}.
      * This is functionally the reverse of {@link ExpressionBuilder#visitArithmeticUnary}.
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java
new file mode 100644
index 0000000000000..98f36d339976c
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/CheckLicenseTests.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function;
+
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.license.License;
+import org.elasticsearch.license.LicensedFeature;
+import org.elasticsearch.license.TestUtils;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.license.internal.XPackLicenseStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.esql.EsqlTestUtils;
+import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.analysis.Analyzer;
+import org.elasticsearch.xpack.esql.analysis.AnalyzerContext;
+import org.elasticsearch.xpack.esql.analysis.Verifier;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.function.Function;
+import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.parser.EsqlParser;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.esql.stats.Metrics;
+
+import java.util.List;
+
+import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.analyzerDefaultMapping;
+import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.defaultEnrichResolution;
+import static org.hamcrest.Matchers.containsString;
+
+public class CheckLicenseTests extends ESTestCase {
+
+    private final EsqlParser parser = new EsqlParser();
+    private final String esql = "from tests | eval license() | LIMIT 10";
+
+    public void testLicense() {
+        for (License.OperationMode functionLicense : License.OperationMode.values()) {
+            final LicensedFeature functionLicenseFeature = random().nextBoolean()
+                ? LicensedFeature.momentary("test", "license", functionLicense)
+                : LicensedFeature.persistent("test", "license", functionLicense);
+            final EsqlFunctionRegistry.FunctionBuilder builder = (source, expression, cfg) -> {
+                final LicensedFunction licensedFunction = new LicensedFunction(source);
+                licensedFunction.setLicensedFeature(functionLicenseFeature);
+                return licensedFunction;
+            };
+            for (License.OperationMode operationMode : License.OperationMode.values()) {
+                if (License.OperationMode.TRIAL != operationMode && License.OperationMode.compare(operationMode, functionLicense) < 0) {
+                    // non-compliant license
+                    final VerificationException ex = expectThrows(VerificationException.class, () -> analyze(builder, operationMode));
+                    assertThat(ex.getMessage(), containsString("current license is non-compliant for function [license()]"));
+                } else {
+                    // compliant license
+                    assertNotNull(analyze(builder, operationMode));
+                }
+            }
+        }
+    }
+
+    private LogicalPlan analyze(EsqlFunctionRegistry.FunctionBuilder builder, License.OperationMode operationMode) {
+        final FunctionDefinition def = EsqlFunctionRegistry.def(LicensedFunction.class, builder, "license");
+        final EsqlFunctionRegistry registry = new EsqlFunctionRegistry(def) {
+            @Override
+            public EsqlFunctionRegistry snapshotRegistry() {
+                return this;
+            }
+        };
+        return analyzer(registry, operationMode).analyze(parser.createStatement(esql));
+    }
+
+    private static Analyzer analyzer(EsqlFunctionRegistry registry, License.OperationMode operationMode) {
+        return new Analyzer(
+            new AnalyzerContext(EsqlTestUtils.TEST_CFG, registry, analyzerDefaultMapping(), defaultEnrichResolution()),
+            new Verifier(new Metrics(new EsqlFunctionRegistry()), getLicenseState(operationMode))
+        );
+    }
+
+    private static XPackLicenseState getLicenseState(License.OperationMode operationMode) {
+        final TestUtils.UpdatableLicenseState licenseState = new TestUtils.UpdatableLicenseState();
+        licenseState.update(new XPackLicenseStatus(operationMode, true, null));
+        return licenseState;
+    }
+
+    // It needs to be public because we run validation on it via reflection in org.elasticsearch.xpack.esql.tree.EsqlNodeSubclassTests.
+    // This also prevents adding the license as a constructor parameter.
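Before the `LicensedFunction` helper class below, one aside on the compliance loop above: the compliant/non-compliant branch that `testLicense` exercises reduces to a single predicate over `License.OperationMode`. A minimal sketch of that rule, reusing the same `License.OperationMode.compare` call the test itself makes; the `isCompliant` helper and class name are hypothetical, not an Elasticsearch API:

```java
import org.elasticsearch.license.License;

final class LicenseComplianceSketch {
    // Mirrors the branch in testLicense(): a TRIAL license may use any licensed
    // function; otherwise the active operation mode must rank at least as high
    // as the mode required by the function's licensed feature.
    static boolean isCompliant(License.OperationMode active, License.OperationMode required) {
        return active == License.OperationMode.TRIAL || License.OperationMode.compare(active, required) >= 0;
    }
}
```

Every mode other than TRIAL that compares lower than the feature's mode is expected to fail verification with the "non-compliant" message asserted above; the `LicensedFunction` the test registers follows.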
+    public static class LicensedFunction extends Function {
+
+        private LicensedFeature licensedFeature;
+
+        public LicensedFunction(Source source) {
+            super(source, List.of());
+        }
+
+        void setLicensedFeature(LicensedFeature licensedFeature) {
+            this.licensedFeature = licensedFeature;
+        }
+
+        @Override
+        public boolean checkLicense(XPackLicenseState state) {
+            if (licensedFeature instanceof LicensedFeature.Momentary momentary) {
+                return momentary.check(state);
+            } else {
+                return licensedFeature.checkWithoutTracking(state);
+            }
+        }
+
+        @Override
+        public DataType dataType() {
+            return DataType.KEYWORD;
+        }
+
+        @Override
+        public Expression replaceChildren(List<Expression> newChildren) {
+            throw new UnsupportedOperationException("this type of node doesn't have any children to replace");
+        }
+
+        @Override
+        protected NodeInfo<LicensedFunction> info() {
+            return NodeInfo.create(this);
+        }
+
+        @Override
+        public String getWriteableName() {
+            throw new UnsupportedOperationException();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) {
+            throw new UnsupportedOperationException();
+        }
+    }
+
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
index 7fe67707a7976..775ca45bfa124 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/MultiRowTestCaseSupplier.java
@@ -262,6 +262,42 @@ public static List<TypedDataSupplier> dateCases(int minRows, int maxRows) {
         return cases;
     }
+    /**
+     * Generate cases for {@link DataType#DATE_NANOS}.
+     */
+    public static List<TypedDataSupplier> dateNanosCases(int minRows, int maxRows) {
+        List<TypedDataSupplier> cases = new ArrayList<>();
+        addSuppliers(cases, minRows, maxRows, "<1970-01-01T00:00:00.000000000Z>", DataType.DATE_NANOS, () -> 0L);
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "<date nanos>",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(0, 10 * (long) 10e11)
+        );
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "<far future date nanos>",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(10 * (long) 10e11, Long.MAX_VALUE)
+        );
+        addSuppliers(
+            cases,
+            minRows,
+            maxRows,
+            "<near the end of time date nanos>",
+            DataType.DATE_NANOS,
+            () -> ESTestCase.randomLongBetween(Long.MAX_VALUE / 100 * 99, Long.MAX_VALUE)
+        );
+
+        return cases;
+    }
+
     public static List<TypedDataSupplier> booleanCases(int minRows, int maxRows) {
         List<TypedDataSupplier> cases = new ArrayList<>();
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
index df0737feadd8d..43e2ededeff0e 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java
@@ -89,6 +89,18 @@ static String binaryOperator(String operator) throws IOException {
         return toSvg(new Sequence(expressions.toArray(Expression[]::new)));
     }
+    /**
+     * Generate a railroad diagram for a search operator. The output would look like
+     * {@code field : value}.
+ */ + static String searchOperator(String operator) throws IOException { + List expressions = new ArrayList<>(); + expressions.add(new Literal("field")); + expressions.add(new Syntax(operator)); + expressions.add(new Literal("query")); + return toSvg(new Sequence(expressions.toArray(Expression[]::new))); + } + /** * Generate a railroad diagram for unary operator. The output would look like * {@code -v}. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java index 5e23083d7c810..fff2d824fc710 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinctTests.java @@ -52,6 +52,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.dateNanosCases(1, 1000), MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java index 9756804a1ec0f..7d4b46f2a902a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MaxTests.java @@ -90,6 +90,15 @@ public static Iterable parameters() { equalTo(200L) ) ), + new TestCaseSupplier( + List.of(DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")), + "Max[field=Attribute[channel=0]]", + DataType.DATE_NANOS, + equalTo(200L) + ) + ), new TestCaseSupplier( List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java index 171181496c889..58ef8d86017a8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/MinTests.java @@ -90,6 +90,15 @@ public static Iterable parameters() { equalTo(200L) ) ), + new TestCaseSupplier( + List.of(DataType.DATE_NANOS), + () -> new TestCaseSupplier.TestCase( + List.of(TestCaseSupplier.TypedData.multiRow(List.of(200L), DataType.DATE_NANOS, "field")), + "Min[field=Attribute[channel=0]]", + DataType.DATE_NANOS, + equalTo(200L) + ) + ), new TestCaseSupplier( List.of(DataType.BOOLEAN), () -> new TestCaseSupplier.TestCase( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java index 55320543d0ec3..29faceee7497e 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -45,6 +45,7 @@ public static Iterable parameters() { MultiRowTestCaseSupplier.longCases(1, 1000, Long.MIN_VALUE, Long.MAX_VALUE, true), MultiRowTestCaseSupplier.doubleCases(1, 1000, -Double.MAX_VALUE, Double.MAX_VALUE, true), MultiRowTestCaseSupplier.dateCases(1, 1000), + MultiRowTestCaseSupplier.dateNanosCases(1, 1000), MultiRowTestCaseSupplier.booleanCases(1, 1000), MultiRowTestCaseSupplier.ipCases(1, 1000), MultiRowTestCaseSupplier.versionCases(1, 1000), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java new file mode 100644 index 0000000000000..32e9670286ef7 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.LinkedList; +import java.util.List; +import java.util.function.Supplier; + +/** + * This class is only used to generates docs for the match operator - all testing is done in {@link MatchTests} + */ +@FunctionName("match_operator") +public class MatchOperatorTests extends MatchTests { + + public MatchOperatorTests(@Name("TestCase") Supplier testCaseSupplier) { + super(testCaseSupplier); + } + + @ParametersFactory + public static Iterable parameters() { + // Have a minimal test so that we can generate the appropriate types in the docs + List suppliers = new LinkedList<>(); + addPositiveTestCase(List.of(DataType.KEYWORD, DataType.KEYWORD), suppliers); + addPositiveTestCase(List.of(DataType.TEXT, DataType.TEXT), suppliers); + addPositiveTestCase(List.of(DataType.KEYWORD, DataType.TEXT), suppliers); + addPositiveTestCase(List.of(DataType.TEXT, DataType.KEYWORD), suppliers); + return parameterSuppliersFromTypedData(suppliers); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index 6d0c45a972299..6a4a7404135f9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -36,19 +36,11 @@ public MatchTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - Set supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); - Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); - Set 
supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); - List> supportedPerPosition = List.of( - supportedTextParams, - supportedTextParams, - supportedNumericParams, - supportedFuzzinessParams - ); + List> supportedPerPosition = supportedParams(); List suppliers = new LinkedList<>(); for (DataType fieldType : DataType.stringTypes()) { for (DataType queryType : DataType.stringTypes()) { - addPositiveTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); + addPositiveTestCase(List.of(fieldType, queryType), suppliers); addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); } } @@ -61,11 +53,20 @@ public static Iterable parameters() { ); } - private static void addPositiveTestCase( - List paramDataTypes, - List> supportedPerPosition, - List suppliers - ) { + protected static List> supportedParams() { + Set supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); + Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); + Set supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); + List> supportedPerPosition = List.of( + supportedTextParams, + supportedTextParams, + supportedNumericParams, + supportedFuzzinessParams + ); + return supportedPerPosition; + } + + protected static void addPositiveTestCase(List paramDataTypes, List suppliers) { // Positive case - creates an ES field from the field parameter type suppliers.add( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index 65f5653f27e1a..11894cf5b847b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.plugin.EsqlCorePlugin; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -397,9 +396,6 @@ protected static void dateNanos( DataType expectedDataType, BiFunction> matcher ) { - if (EsqlCorePlugin.DATE_NANOS_FEATURE_FLAG.isEnabled() == false) { - return; - } cases.add( new TestCaseSupplier( name + "(epoch nanos)", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java index e8f9f26a76f43..0d114b4964920 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/LessThanTests.java @@ -113,8 +113,8 @@ public static Iterable parameters() { "rhs", (l, r) -> ((Number) l).longValue() < ((Number) r).longValue(), DataType.BOOLEAN, - TestCaseSupplier.dateCases(), - TestCaseSupplier.dateCases(), + TestCaseSupplier.dateNanosCases(), + 
TestCaseSupplier.dateNanosCases(), List.of(), false ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java index d3e1710a715af..9a1a30b892b22 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanStreamOutputTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -23,10 +24,10 @@ import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.function.FieldAttributeTests; import org.elasticsearch.xpack.esql.expression.function.MetadataAttributeTests; import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests; -import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttributeTests; import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.esql.type.EsFieldTests; @@ -280,9 +281,8 @@ private Column randomColumn() { static { List writeables = new ArrayList<>(); - writeables.addAll(Block.getNamedWriteables()); - writeables.addAll(Attribute.getNamedWriteables()); - writeables.add(UnsupportedAttribute.ENTRY); + writeables.addAll(BlockWritables.getNamedWriteables()); + writeables.addAll(ExpressionWritables.attributes()); REGISTRY = new NamedWriteableRegistry(new ArrayList<>(new HashSet<>(writeables))); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 073a51ee69114..269b4806680a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -22,10 +22,10 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -145,7 +145,7 @@ private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichRes return new Analyzer( new AnalyzerContext(config, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new 
Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } @@ -1092,8 +1092,6 @@ public void testMissingFieldsDoNotGetExtracted() { * estimatedRowSize[324] */ public void testSingleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - var plan = plannerOptimizer.plan(""" from test | where first_name:"Anna" @@ -1124,8 +1122,6 @@ public void testSingleMatchFilterPushdown() { * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336] */ public void testMultipleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - String query = """ from test | where first_name:"Anna" and first_name:"Anneke" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c29f111488f96..96951ee15d48b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -111,7 +111,7 @@ import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.join.InlineJoin; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; @@ -2563,7 +2563,7 @@ public void testSimplifyRLikeMatchAll() { public void testRLikeWrongPattern() { String query = "from test | where first_name rlike \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[invalid range: from (95) cannot be > to (93)]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -2571,7 +2571,7 @@ public void testRLikeWrongPattern() { public void testLikeWrongPattern() { String query = "from test | where first_name like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -5624,6 +5624,7 @@ protected List filteredWarnings() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test @@ -5650,7 +5651,7 @@ public void testLookupSimple() { var limit = as(left.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(1000)); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); 
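// JoinTypes (asserted just above) replaces the former JoinType enum: JoinType is assumed to now be
// an interface whose common variants live as constants on a JoinTypes holder class, which is why
// randomFrom(JoinType.values()) elsewhere in this diff becomes an explicit list of constants.
// A minimal, self-contained sketch of that assumed shape; CoreJoinType is an illustrative name,
// and the real implementations carry more (e.g. serialization support) than shown here.
interface JoinType {
    String joinName();
}

final class JoinTypes {
    private JoinTypes() {}

    static final JoinType LEFT = new CoreJoinType("LEFT");
    static final JoinType RIGHT = new CoreJoinType("RIGHT");
    static final JoinType INNER = new CoreJoinType("INNER");
    static final JoinType FULL = new CoreJoinType("FULL");
    static final JoinType CROSS = new CoreJoinType("CROSS");

    // A record satisfies the interface: its joinName component generates the joinName() accessor.
    private record CoreJoinType(String joinName) implements JoinType {}
}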
assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); @@ -5703,6 +5704,7 @@ public void testLookupSimple() { * 9]]], BytesRefVectorBlock[vector=BytesRefArrayVector[positions=10]]]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupStats() { String query = """ FROM test @@ -5738,7 +5740,7 @@ public void testLookupStats() { assertThat(left.output().toString(), containsString("int{r}")); as(left.child(), EsRelation.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertThat(join.config().matchFields().stream().map(Object::toString).toList(), matchesList().item(startsWith("int{r}"))); assertThat(join.config().leftFields().size(), equalTo(1)); assertThat(join.config().rightFields().size(), equalTo(1)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index eb115ed7b2948..f3ba11457a715 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -93,7 +93,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Project; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.join.Join; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -114,7 +114,6 @@ import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -2751,7 +2750,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); var eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); // Now optimize the plan and assert the same plan again, since no FieldExtractExec is added var optimized = optimizedPlan(plan); @@ -2765,7 +2764,7 @@ public void testSpatialTypesAndStatsUseDocValuesNestedLiteral() { assertThat("No groupings in aggregation", agg.groupings().size(), equalTo(0)); assertAggregation(agg, "centroid", SpatialCentroid.class, GEO_POINT, false); eval = as(agg.child(), EvalExec.class); - as(eval.child(), RowExec.class); + as(eval.child(), LocalSourceExec.class); } /** @@ -6423,11 +6422,12 @@ public void testMaxQueryDepthPlusExpressionDepth() { assertThat(e.getMessage(), containsString("ESQL statement exceeded the maximum query depth allowed (" + MAX_QUERY_DEPTH + 
")")); } + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupSimple() { String query = """ FROM test | RENAME languages AS int - | LOOKUP int_number_names ON int"""; + | LOOKUP_🐔 int_number_names ON int"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); @@ -6468,18 +6468,19 @@ public void testLookupSimple() { * \_EsQueryExec[...] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenProject() { String query = """ FROM employees | SORT emp_no | LIMIT 4 | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME int AS languages, name AS lang_name | KEEP emp_no, languages, lang_name"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 5:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } PhysicalPlan plan = optimizedPlan(physicalPlan(query)); @@ -6526,17 +6527,18 @@ public void testLookupThenProject() { * \_LocalRelation[[int{f}#24, name{f}#25],[...]] * } */ + @AwaitsFix(bugUrl = "lookup functionality is not yet implemented") public void testLookupThenTopN() { String query = """ FROM employees | RENAME languages AS int - | LOOKUP int_number_names ON int + | LOOKUP_🐔 int_number_names ON int | RENAME name AS languages | KEEP languages, emp_no | SORT languages ASC, emp_no ASC"""; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> analyze(query)); - assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 3:3: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = physicalPlan(query); @@ -6553,7 +6555,7 @@ public void testLookupThenTopN() { matchesList().item(startsWith("name{f}")).item(startsWith("emp_no{f}")) ); Join join = as(innerTopN.child(), Join.class); - assertThat(join.config().type(), equalTo(JoinType.LEFT)); + assertThat(join.config().type(), equalTo(JoinTypes.LEFT)); assertMap(join.config().matchFields().stream().map(Objects::toString).toList(), matchesList().item(startsWith("int{r}"))); Project innerProject = as(join.left(), Project.class); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 0177747d27243..710637c05a900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -134,7 +134,7 @@ public void testStringLiteralsExceptions() { ); var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining()); - assertParsingException(() -> parse("row foo == " + number), "line 1:13: Number [" + number + "] is too large"); + assertParsingException(() -> parse("row foo == " + number), "line 1:12: Number [" + number + "] is too large"); } public void testBooleanLiteralsCondition() { @@ -442,20 +442,20 @@ public void testOverflowingValueForDuration() { for (String unit : List.of("milliseconds", "seconds", "minutes", "hours")) { 
assertParsingException( () -> parse("row x = 9223372036854775808 " + unit), // unsigned_long (Long.MAX_VALUE + 1) - "line 1:10: Number [9223372036854775808] outside of [" + unit + "] range" + "line 1:9: Number [9223372036854775808] outside of [" + unit + "] range" ); assertParsingException( () -> parse("row x = 18446744073709551616 " + unit), // double (UNSIGNED_LONG_MAX + 1) - "line 1:10: Number [18446744073709551616] outside of [" + unit + "] range" + "line 1:9: Number [18446744073709551616] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 153722867280912931 minutes"), // Long.MAX_VALUE / 60 + 1 - "line 1:10: Number [153722867280912931] outside of [minutes] range" + "line 1:9: Number [153722867280912931] outside of [minutes] range" ); assertParsingException( () -> parse("row x = 2562047788015216 hours"), // Long.MAX_VALUE / 3600 + 1 - "line 1:10: Number [2562047788015216] outside of [hours] range" + "line 1:9: Number [2562047788015216] outside of [hours] range" ); } @@ -463,12 +463,12 @@ public void testOverflowingValueForPeriod() { for (String unit : List.of("days", "weeks", "months", "years")) { assertParsingException( () -> parse("row x = 2147483648 " + unit), // long (Integer.MAX_VALUE + 1) - "line 1:10: Number [2147483648] outside of [" + unit + "] range" + "line 1:9: Number [2147483648] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 306783379 weeks"), // Integer.MAX_VALUE / 7 + 1 - "line 1:10: Number [306783379] outside of [weeks] range" + "line 1:9: Number [306783379] outside of [weeks] range" ); } @@ -544,7 +544,7 @@ public void testWildcardProjectAwayPatterns() { } public void testForbidWildcardProjectAway() { - assertParsingException(() -> dropExpression("foo, *"), "line 1:21: Removing all fields is not allowed [*]"); + assertParsingException(() -> dropExpression("foo, *"), "line 1:20: Removing all fields is not allowed [*]"); } public void testForbidMultipleIncludeStar() { @@ -608,7 +608,7 @@ public void testMultipleProjectPatterns() { } public void testForbidWildcardProjectRename() { - assertParsingException(() -> renameExpression("b* AS a*"), "line 1:18: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); + assertParsingException(() -> renameExpression("b* AS a*"), "line 1:17: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); } public void testSimplifyInWithSingleElementList() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 0f46c1f44e8d3..69c00eb395fdb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -491,25 +491,25 @@ private void clusterAndIndexAsIndexPattern(String command, String clusterAndInde public void testStringAsLookupIndexPattern() { assumeTrue("requires snapshot build", Build.current().isSnapshot()); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"foo\" ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"foo\" ON j"); assertStringAsLookupIndexPattern("test-*", """ - ROW x = 1 | LOOKUP "test-*" ON j + ROW x = 1 | LOOKUP_🐔 "test-*" ON j """); - assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP test-* ON j"); - assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP 123-test@foo_bar+baz1 
ON j"); + assertStringAsLookupIndexPattern("test-*", "ROW x = 1 | LOOKUP_🐔 test-* ON j"); + assertStringAsLookupIndexPattern("123-test@foo_bar+baz1", "ROW x = 1 | LOOKUP_🐔 123-test@foo_bar+baz1 ON j"); assertStringAsLookupIndexPattern("foo, test-*, abc, xyz", """ - ROW x = 1 | LOOKUP "foo, test-*, abc, xyz" ON j + ROW x = 1 | LOOKUP_🐔 "foo, test-*, abc, xyz" ON j """); - assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP ON j"); + assertStringAsLookupIndexPattern("", "ROW x = 1 | LOOKUP_🐔 ON j"); assertStringAsLookupIndexPattern( "", - "ROW x = 1 | LOOKUP \"\" ON j" + "ROW x = 1 | LOOKUP_🐔 \"\" ON j" ); - assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP \"\"\"foo\"\"\" ON j"); - assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP `backtick` ON j"); - assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP ``multiple`back``ticks``` ON j"); - assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP .dot ON j"); + assertStringAsLookupIndexPattern("foo", "ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\" ON j"); + assertStringAsLookupIndexPattern("`backtick`", "ROW x = 1 | LOOKUP_🐔 `backtick` ON j"); + assertStringAsLookupIndexPattern("``multiple`back``ticks```", "ROW x = 1 | LOOKUP_🐔 ``multiple`back``ticks``` ON j"); + assertStringAsLookupIndexPattern(".dot", "ROW x = 1 | LOOKUP_🐔 .dot ON j"); clusterAndIndexAsLookupIndexPattern("cluster:index"); clusterAndIndexAsLookupIndexPattern("cluster:.index"); clusterAndIndexAsLookupIndexPattern("cluster*:index*"); @@ -519,16 +519,16 @@ public void testStringAsLookupIndexPattern() { } private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP " + clusterAndIndex + " ON j"); - assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP \"" + clusterAndIndex + "\"" + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 " + clusterAndIndex + " ON j"); + assertStringAsLookupIndexPattern(clusterAndIndex, "ROW x = 1 | LOOKUP_🐔 \"" + clusterAndIndex + "\"" + " ON j"); } public void testInvalidCharacterInIndexPattern() { Map commands = new HashMap<>(); - commands.put("FROM {}", "line 1:7: "); + commands.put("FROM {}", "line 1:6: "); if (Build.current().isSnapshot()) { - commands.put("METRICS {}", "line 1:10: "); - commands.put("ROW x = 1 | LOOKUP {} ON j", "line 1:21: "); + commands.put("METRICS {}", "line 1:9: "); + commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:22: "); } String lineNumber; for (String command : commands.keySet()) { @@ -568,11 +568,11 @@ public void testInvalidCharacterInIndexPattern() { // comma separated indices, with exclusions // Invalid index names after removing exclusion fail, when there is no index name with wildcard before it for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:21: " : "line 1:24: "; + lineNumber = command.contains("FROM") ? 
"line 1:20: " : "line 1:23: "; expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); @@ -582,10 +582,10 @@ public void testInvalidCharacterInIndexPattern() { // Invalid index names, except invalid DateMath, are ignored if there is an index name with wildcard before it String dateMathError = "unit [D] not supported for date math [/D]"; for (String command : commands.keySet()) { - if (command.contains("LOOKUP")) { + if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; + lineNumber = command.contains("FROM") ? "line 1:9: " : "line 1:12: "; clustersAndIndices(command, "*", "-index#pattern"); clustersAndIndices(command, "index*", "-index#pattern"); clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>"); @@ -646,17 +646,17 @@ public void testInvalidQuotingAsMetricsIndexPattern() { public void testInvalidQuotingAsLookupIndexPattern() { assumeTrue("requires snapshot builds", Build.current().isSnapshot()); - expectError("ROW x = 1 | LOOKUP \"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo ON j", ": token recognition error at: '\"foo ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo ON j", ": token recognition error at: '\"foo ON j'"); - expectError("ROW x = 1 | LOOKUP foo\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 foo\"\"\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); - expectError("ROW x = 1 | LOOKUP \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"bar\" ON j", ": token recognition error at: '\" ON j'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"foo\"\"bar\" ON j", ": extraneous input '\"bar\"' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); - expectError("ROW x = 1 | LOOKUP \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", "line 1:31: mismatched input '\"bar\"' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"bar\"\"\" ON j", ": mismatched input 'bar' expecting 'on'"); + expectError("ROW x = 1 | LOOKUP_🐔 \"\"\"foo\"\"\"\"\"\"bar\"\"\" ON j", ": mismatched input '\"bar\"' expecting 'on'"); } public void testIdentifierAsFieldName() { @@ -885,18 +885,18 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { public void testDeprecatedIsNullFunction() { expectError( "from test | eval x = is_null(f)", - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); expectError( "row x = is_null(f)", - "line 1:10: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:9: 
is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); if (Build.current().isSnapshot()) { expectError( "from test | eval x = ?fn1(f)", List.of(paramAsIdentifier("fn1", "IS_NULL")), - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); } } @@ -911,23 +911,23 @@ public void testMetadataFieldOnOtherSources() { } public void testMetadataFieldMultipleDeclarations() { - expectError("from test metadata _index, _version, _index", "1:39: metadata field [_index] already declared [@1:20]"); + expectError("from test metadata _index, _version, _index", "1:38: metadata field [_index] already declared [@1:20]"); } public void testMetadataFieldUnsupportedPrimitiveType() { - expectError("from test metadata _tier", "line 1:21: unsupported metadata field [_tier]"); + expectError("from test metadata _tier", "line 1:20: unsupported metadata field [_tier]"); } public void testMetadataFieldUnsupportedCustomType() { - expectError("from test metadata _feature", "line 1:21: unsupported metadata field [_feature]"); + expectError("from test metadata _feature", "line 1:20: unsupported metadata field [_feature]"); } public void testMetadataFieldNotFoundNonExistent() { - expectError("from test metadata _doesnot_compute", "line 1:21: unsupported metadata field [_doesnot_compute]"); + expectError("from test metadata _doesnot_compute", "line 1:20: unsupported metadata field [_doesnot_compute]"); } public void testMetadataFieldNotFoundNormalField() { - expectError("from test metadata emp_no", "line 1:21: unsupported metadata field [emp_no]"); + expectError("from test metadata emp_no", "line 1:20: unsupported metadata field [emp_no]"); } public void testDissectPattern() { @@ -985,13 +985,13 @@ public void testGrokPattern() { expectError( "row a = \"foo bar\" | GROK a \"%{NUMBER:foo} %{WORD:foo}\"", - "line 1:22: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + "line 1:21: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + " the attribute [foo] is defined multiple times with different types" ); expectError( "row a = \"foo\" | GROK a \"(?P.+)\"", - "line 1:18: Invalid grok pattern [(?P.+)]: [undefined group option]" + "line 1:17: Invalid grok pattern [(?P.+)]: [undefined group option]" ); } @@ -1015,7 +1015,7 @@ public void testLikeRLike() { expectError( "from a | where foo like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\"", - "line 1:17: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "line 1:16: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]" ); } @@ -1076,7 +1076,7 @@ public void testEnrich() { ); expectError( "from a | enrich typo:countries on foo", - "line 1:18: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" + "line 1:17: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" ); } @@ -1261,8 +1261,8 @@ public void testInvalidPositionalParams() { expectError( "from test | where x < ?0 and y < ?2", List.of(paramAsConstant(null, 5)), - "line 1:24: No parameter is defined for position 0, did you mean position 1?; " - + "line 1:35: No parameter is defined for position 2, did you mean position 1?" 
+ "line 1:23: No parameter is defined for position 0, did you mean position 1?; " + + "line 1:34: No parameter is defined for position 2, did you mean position 1?" ); expectError( @@ -2050,7 +2050,7 @@ private void assertStringAsIndexPattern(String string, String statement) { private void assertStringAsLookupIndexPattern(String string, String statement) { if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(statement)); - assertThat(e.getMessage(), containsString("line 1:14: LOOKUP is in preview and only available in SNAPSHOT build")); + assertThat(e.getMessage(), containsString("line 1:14: LOOKUP_🐔 is in preview and only available in SNAPSHOT build")); return; } var plan = statement(statement); @@ -2107,18 +2107,18 @@ public void testEnrichOnMatchField() { } public void testInlineConvertWithNonexistentType() { - expectError("ROW 1::doesnotexist", "line 1:9: Unknown data type named [doesnotexist]"); - expectError("ROW \"1\"::doesnotexist", "line 1:11: Unknown data type named [doesnotexist]"); - expectError("ROW false::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); - expectError("ROW abs(1)::doesnotexist", "line 1:14: Unknown data type named [doesnotexist]"); - expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW 1::doesnotexist", "line 1:8: Unknown data type named [doesnotexist]"); + expectError("ROW \"1\"::doesnotexist", "line 1:10: Unknown data type named [doesnotexist]"); + expectError("ROW false::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); + expectError("ROW abs(1)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW (1+2)::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); } public void testLookup() { - String query = "ROW a = 1 | LOOKUP t ON j"; + String query = "ROW a = 1 | LOOKUP_🐔 t ON j"; if (Build.current().isSnapshot() == false) { var e = expectThrows(ParsingException.class, () -> statement(query)); - assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP' expecting {")); + assertThat(e.getMessage(), containsString("line 1:13: mismatched input 'LOOKUP_🐔' expecting {")); return; } var plan = statement(query); @@ -2131,7 +2131,7 @@ public void testLookup() { } public void testInlineConvertUnsupportedType() { - expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); + expectError("ROW 3::BYTE", "line 1:5: Unsupported conversion to type [BYTE]"); } public void testMetricsWithoutStats() { @@ -2300,7 +2300,6 @@ public void testMetricWithGroupKeyAsAgg() { } public void testMatchOperatorConstantQueryString() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); var plan = statement("FROM test | WHERE field:\"value\""); var filter = as(plan, Filter.class); var match = (Match) filter.condition(); @@ -2310,7 +2309,6 @@ public void testMatchOperatorConstantQueryString() { } public void testInvalidMatchOperator() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); expectError("from test | WHERE field:", "line 1:25: mismatched input '' expecting {QUOTED_STRING, "); expectError( "from test | WHERE field:CONCAT(\"hello\", \"world\")", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java index 6936c96a143d4..eea408914f4c5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/AbstractLogicalPlanSerializationTests.java @@ -8,13 +8,11 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelationSerializationTests; import java.util.ArrayList; @@ -32,12 +30,10 @@ public static LogicalPlan randomChild(int depth) { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.logical()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java index 6b17e4efd4de7..7c75ea623b34f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinSerializationTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.io.IOException; import java.util.List; @@ -27,7 +28,7 @@ protected Join createTestInstance() { } private static JoinConfig randomJoinConfig() { - JoinType type = randomFrom(JoinType.values()); + JoinType type = randomFrom(JoinTypes.LEFT, JoinTypes.RIGHT, JoinTypes.INNER, JoinTypes.FULL, JoinTypes.CROSS); List<Attribute> matchFields = randomFieldAttributes(1, 10, false); List<Attribute> leftFields = randomFieldAttributes(1, 10, false); List<Attribute> rightFields = randomFieldAttributes(1, 10, false); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java index dde70d85ba259..13887fbd1740c 100644 ---
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/JoinTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; -import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import java.util.ArrayList; import java.util.List; @@ -48,7 +48,7 @@ public void testExpressionsAndReferences() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); // matchfields are a subset of the left and right fields, so they don't contribute to the size of the references set. @@ -88,7 +88,7 @@ public void testTransformExprs() { Row left = new Row(Source.EMPTY, leftFields); Row right = new Row(Source.EMPTY, rightFields); - JoinConfig joinConfig = new JoinConfig(JoinType.LEFT, matchFields, leftAttributes, rightAttributes); + JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, matchFields, leftAttributes, rightAttributes); Join join = new Join(Source.EMPTY, left, right, joinConfig); assertTrue(join.config().matchFields().stream().allMatch(ref -> ref.dataType().equals(DataType.INTEGER))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java index ccb27b41f2ed6..1f56150977d99 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalSupplierTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.test.AbstractWireTestCase; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; @@ -81,6 +82,6 @@ protected boolean shouldBeSame(LocalSupplier newInstance) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java index 4b74114a0e01c..7689b80515880 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/AbstractPhysicalPlanSerializationTests.java @@ -9,16 +9,13 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.compute.data.Block; +import 
org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.tree.Node; -import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.plan.AbstractNodeSerializationTests; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.PlanWritables; import java.util.ArrayList; import java.util.List; @@ -50,13 +47,10 @@ public static Integer randomEstimatedRowSize() { @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { List entries = new ArrayList<>(); - entries.addAll(PhysicalPlan.getNamedWriteables()); - entries.addAll(LogicalPlan.getNamedWriteables()); - entries.addAll(AggregateFunction.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Block.getNamedWriteables()); - entries.addAll(NamedExpression.getNamedWriteables()); + entries.addAll(PlanWritables.getNamedWriteables()); + entries.addAll(ExpressionWritables.aggregates()); + entries.addAll(ExpressionWritables.allExpressions()); + entries.addAll(BlockWritables.getNamedWriteables()); entries.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); // Query builders entries.add(Add.ENTRY); // Used by the eval tests return new NamedWriteableRegistry(entries); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index f60e5384e1a6f..ff9e45a9f9233 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -146,6 +146,7 @@ private LocalExecutionPlanner planner() throws IOException { null, null, null, + null, esPhysicalOperationProviders() ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java index cf90cf96fe683..57210fda07f2b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/QueryTranslatorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -46,7 +47,7 @@ private static Analyzer makeAnalyzer(String mappingFileName) { return new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, new EnrichResolution()), - new Verifier(new Metrics(new EsqlFunctionRegistry())) + new Verifier(new Metrics(new EsqlFunctionRegistry()), new XPackLicenseState(() -> 0L)) ); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java index 1f35bb5312b20..b010616cd7cf7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/ConfigurationSerializationTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.esql.Column; @@ -110,6 +110,6 @@ protected Configuration mutateInstance(Configuration in) { @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(Block.getNamedWriteables()); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 116df21a33ac0..b323efad2b4c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.IndexMode; import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -102,7 +103,7 @@ public void testFailedMetric() { return null; }).when(esqlClient).execute(eq(EsqlResolveFieldsAction.TYPE), any(), any()); - var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP); + var planExecutor = new PlanExecutor(indexResolver, MeterRegistry.NOOP, new XPackLicenseState(() -> 0L)); var enrichResolver = mockEnrichResolver(); var request = new EsqlQueryRequest(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index 5e6588d2295f9..eda906b147956 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.stats; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.common.stats.Counters; import org.elasticsearch.xpack.esql.analysis.Verifier; @@ -205,7 +206,7 @@ public void testTwoWhereQuery() { public void testTwoQueriesExecuted() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); 
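// Note on the new Verifier argument: XPackLicenseState's constructor takes a LongSupplier used as
// its clock (epoch millis). Pinning it to () -> 0L is presumably sufficient for these tests, which
// only need a concrete license state and exercise no time-sensitive licensed features.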
esqlWithVerifier(""" from employees | where languages > 2 @@ -252,7 +253,7 @@ public void testTwoQueriesExecuted() { public void testMultipleFunctions() { Metrics metrics = new Metrics(new EsqlFunctionRegistry()); - Verifier verifier = new Verifier(metrics); + Verifier verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); esqlWithVerifier(""" from employees | where languages > 2 @@ -526,7 +527,7 @@ private Counters esql(String esql, Verifier v) { Metrics metrics = null; if (v == null) { metrics = new Metrics(new EsqlFunctionRegistry()); - verifier = new Verifier(metrics); + verifier = new Verifier(metrics, new XPackLicenseState(() -> 0L)); } analyzer(verifier).analyze(parser.createStatement(esql)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index 82f0ebf316508..c1d94933537f0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -43,7 +43,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Grok; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; +import org.elasticsearch.xpack.esql.plan.logical.join.JoinTypes; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.Stat; import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType; @@ -436,8 +438,9 @@ public void accept(Page page) { } else if (argClass == Integer.class) { return randomInt(); } else if (argClass == JoinType.class) { - return JoinType.LEFT; + return JoinTypes.LEFT; } + if (Expression.class == argClass) { /* * Rather than use any old subclass of expression lets @@ -488,6 +491,15 @@ public void accept(Page page) { if (argClass == Configuration.class) { return randomConfiguration(); } + if (argClass == JoinConfig.class) { + return new JoinConfig( + JoinTypes.LEFT, + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()), + List.of(UnresolvedAttributeTests.randomUnresolvedAttribute()) + ); + } + try { return mock(argClass); } catch (MockitoException e) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java index f533c20975aff..987ab103cf80b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/type/MultiTypeEsFieldTests.java @@ -10,14 +10,13 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.test.AbstractWireTestCase; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import 
org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.esql.expression.ExpressionWritables; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; @@ -92,9 +91,8 @@ protected MultiTypeEsField mutateInstance(MultiTypeEsField instance) throws IOEx @Override protected final NamedWriteableRegistry getNamedWriteableRegistry() { - List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(UnaryScalarFunction.getNamedWriteables()); - entries.addAll(Attribute.getNamedWriteables()); - entries.addAll(Expression.getNamedWriteables()); + List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(ExpressionWritables.allExpressions()); + entries.addAll(ExpressionWritables.unaryScalars()); return new NamedWriteableRegistry(entries); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java index 3db834bb579ff..69767ce0b24f0 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/DefaultEndPointsIT.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; @@ -16,9 +19,12 @@ import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.oneOf; @@ -108,4 +114,37 @@ private static void assertDefaultE5Config(Map<String, Object> modelConfig) { Matchers.is(Map.of("enabled", true, "min_number_of_allocations", 0, "max_number_of_allocations", 32)) ); } + + public void testMultipleInferencesTriggeringDownloadAndDeploy() throws InterruptedException { + int numParallelRequests = 4; + var latch = new CountDownLatch(numParallelRequests); + var errors = new ArrayList<Exception>(); + + var listener = new ResponseListener() { + @Override + public void onSuccess(Response response) { + latch.countDown(); + } + + @Override + public void onFailure(Exception exception) { + errors.add(exception); + latch.countDown(); + } + }; + + var inputs = List.of("Hello World", "Goodnight moon"); + var queryParams = Map.of("timeout", "120s"); + for (int i = 0; i < numParallelRequests; i++) { + var request = createInferenceRequest( + Strings.format("_inference/%s", ElasticsearchInternalService.DEFAULT_ELSER_ID), + inputs, + queryParams + ); + client().performRequestAsync(request, listener); + } + + latch.await(); + assertThat(errors.toString(), errors, empty()); + } } diff --git 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 6790b9bb14c5a..4e32ef99d06dd 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -373,12 +373,17 @@ protected Map<String, Object> infer(String modelId, TaskType taskType, List<String> input, Map<String, String> queryParameters) - private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + protected Request createInferenceRequest(String endpoint, List<String> input, Map<String, String> queryParameters) { var request = new Request("POST", endpoint); request.setJsonEntity(jsonBody(input)); if (queryParameters.isEmpty() == false) { request.addParameters(queryParameters); } + return request; + } + + private Map<String, Object> inferInternal(String endpoint, List<String> input, Map<String, String> queryParameters) throws IOException { + var request = createInferenceRequest(endpoint, input, queryParameters); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 591db6db8495a..78e064b42bbb2 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -432,7 +432,7 @@ public void testUnsupportedStream() throws Exception { assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get("task_type")); try { - var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10))); + var events = streamInferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomUUID())); assertThat(events.size(), equalTo(2)); events.forEach(event -> { switch (event.name()) { @@ -457,7 +457,7 @@ public void testSupportedStream() throws Exception { assertEquals(modelId, singleModel.get("inference_id")); assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type")); - var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphaOfLength(10)).toList(); + var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomUUID()).toList(); try { var events = streamInferOnMockService(modelId, TaskType.COMPLETION, input); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 02bddb6076d69..2320cca8295d1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalServiceSettings; import 
org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandInternalTextEmbeddingServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.CustomElandRerankTaskSettings; +import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticRerankerServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElserMlNodeTaskSettings; @@ -415,7 +416,13 @@ private static void addInternalNamedWriteables(List<NamedWriteableRegistry.Entry> namedWriteables) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java index bbc5082d45004..b74e473155aec 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/RateLimiter.java @@ -28,6 +28,14 @@ * * By setting the accumulated tokens limit to a value greater than zero, it effectively allows bursts of traffic. If the accumulated * tokens limit is set to zero, it will force the acquiring thread to wait on each call. + * + * Example: + * Time unit: Second + * Tokens to produce per time unit: 10 + * Limit for tokens in bucket: 100 + * + * Tokens in bucket after n seconds (n seconds -> tokens in bucket): + * 1 sec -> 10 tokens, 2 sec -> 20 tokens, ... , 10 sec -> 100 tokens (bucket full), ... 200 sec -> 100 tokens (no increase in tokens) */ public class RateLimiter { @@ -76,6 +84,7 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double throw new IllegalArgumentException(Strings.format("Tokens per time unit must be less than or equal to %s", Double.MAX_VALUE)); } + // If the new token limit is smaller than what we've accumulated already, we need to drop tokens to meet the new token limit accumulatedTokens = Math.min(accumulatedTokens, newAccumulatedTokensLimit); accumulatedTokensLimit = newAccumulatedTokensLimit; @@ -88,7 +97,8 @@ public final synchronized void setRate(double newAccumulatedTokensLimit, double } /** - * Causes the thread to wait until the tokens are available + * Causes the thread to wait until the tokens are available. + * This reserves tokens in advance, leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here * @throws InterruptedException _ */ @@ -130,6 +140,7 @@ private static void validateTokenRequest(int tokens) { /** * Returns the amount of time to wait for the tokens to become available. + * This reserves tokens in advance, leading to a reduction of accumulated tokens. * @param tokens the number of items of work that should be throttled, typically you'd pass a value of 1 here. Must be greater than 0.
* @return the amount of time to wait */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index fc070965f29c2..f743b94df3810 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -156,6 +156,8 @@ protected void putModel(Model model, ActionListener listener) { putBuiltInModel(e5Model.getServiceSettings().modelId(), listener); } else if (model instanceof ElserInternalModel elserModel) { putBuiltInModel(elserModel.getServiceSettings().modelId(), listener); + } else if (model instanceof ElasticRerankerModel elasticRerankerModel) { + putBuiltInModel(elasticRerankerModel.getServiceSettings().modelId(), listener); } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index b710b24cbda31..b76de5eeedbfc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -7,14 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class CustomElandModel extends ElasticsearchInternalModel { @@ -39,31 +34,10 @@ public CustomElandModel( } @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the inference as the custom eland model [{0}] for this platform cannot be found." - + " Custom models need to be loaded into the cluster with eland before they can be started.", - internalServiceSettings.modelId() - ) - ); - return; - } - listener.onFailure(e); - } - }; + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + + modelId + + "] as the model cannot be found." 
+ + " Custom models need to be loaded into the cluster with Eland before they can be started."; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java index 724c7a8f0a166..ce6c6258d0393 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticDeployedModel.java @@ -36,6 +36,11 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA throw new IllegalStateException("cannot start model that uses an existing deployment"); } + @Override + protected String modelNotFoundErrorMessage(String modelId) { + throw new IllegalStateException("cannot start model [" + modelId + "] that uses an existing deployment"); + } + @Override public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java new file mode 100644 index 0000000000000..115cc9f05599a --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerModel.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.ChunkingSettings; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +public class ElasticRerankerModel extends ElasticsearchInternalModel { + + public ElasticRerankerModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticRerankerServiceSettings serviceSettings, + ChunkingSettings chunkingSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, chunkingSettings); + } + + @Override + public ElasticRerankerServiceSettings getServiceSettings() { + return (ElasticRerankerServiceSettings) super.getServiceSettings(); + } + + @Override + public ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ) { + + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + listener.onFailure( + new ResourceNotFoundException("Could not start the Elastic Reranker Endpoint due to [{}]", e, e.getMessage()) + ); + return; + } + listener.onFailure(e); + } + }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java new file mode 100644 index 0000000000000..316dc092e03c7 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticRerankerServiceSettings.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; + +import java.io.IOException; +import java.util.Map; + +public class ElasticRerankerServiceSettings extends ElasticsearchInternalServiceSettings { + + public static final String NAME = "elastic_reranker_service_settings"; + + public ElasticRerankerServiceSettings(ElasticsearchInternalServiceSettings other) { + super(other); + } + + public ElasticRerankerServiceSettings( + Integer numAllocations, + int numThreads, + String modelId, + AdaptiveAllocationsSettings adaptiveAllocationsSettings + ) { + super(numAllocations, numThreads, modelId, adaptiveAllocationsSettings); + } + + public ElasticRerankerServiceSettings(StreamInput in) throws IOException { + super(in); + } + + /** + * Parse the ElasticRerankerServiceSettings from the map and validate the setting values. + * + * If required settings are missing or the values are invalid, a + * {@link ValidationException} is thrown.
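
As a rough illustration of that collect-errors-then-throw contract (hypothetical field name and exception type, not the real ElasticsearchInternalServiceSettings parser):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Toy version of the parse-then-validate pattern: errors are accumulated
// and thrown together instead of failing on the first bad value.
final class RequestMapSketch {
    static int parseNumThreads(Map<String, Object> map) {
        List<String> errors = new ArrayList<>();
        Object raw = map.remove("num_threads"); // consumed, so leftover keys can be rejected later
        if (raw == null) {
            errors.add("[num_threads] is required");
        } else if ((raw instanceof Integer) == false || (Integer) raw <= 0) {
            errors.add("[num_threads] must be a positive integer");
        }
        if (errors.isEmpty() == false) {
            throw new IllegalArgumentException(String.join("; ", errors));
        }
        return (Integer) raw;
    }
}
```
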
* + * @param map Source map containing the config + * @return The builder + */ + public static Builder fromRequestMap(Map map) { + ValidationException validationException = new ValidationException(); + var baseSettings = ElasticsearchInternalServiceSettings.fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return baseSettings; + } + + @Override + public String getWriteableName() { + return ElasticRerankerServiceSettings.NAME; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 2405243f302bc..aa12bf0c645c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.TimeValue; @@ -15,8 +18,10 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; @@ -79,10 +84,38 @@ public StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentA return startRequest; } - public abstract ActionListener getCreateTrainedModelAssignmentActionListener( + public ActionListener getCreateTrainedModelAssignmentActionListener( Model model, ActionListener listener - ); + ) { + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + var cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof ResourceNotFoundException) { + listener.onFailure(new ResourceNotFoundException(modelNotFoundErrorMessage(internalServiceSettings.modelId()))); + return; + } else if (cause instanceof ElasticsearchStatusException statusException) { + if (statusException.status() == RestStatus.CONFLICT + && statusException.getRootCause() instanceof ResourceAlreadyExistsException) { + // Deployment is already started + listener.onResponse(Boolean.TRUE); + return; + } + } + listener.onFailure(e); + } + }; + } + + protected String modelNotFoundErrorMessage(String modelId) { + return "Could not deploy model [" + modelId + "] as the model cannot be found."; + } public boolean usesExistingDeployment() { return internalServiceSettings.getDeploymentId() != null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index fe83acc8574aa..718aeae979fe9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -97,6 +97,8 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 ); + public static final String RERANKER_ID = ".rerank-v1"; + public static final int EMBEDDING_MAX_BATCH_SIZE = 10; public static final String DEFAULT_ELSER_ID = ".elser-2-elasticsearch"; public static final String DEFAULT_E5_ID = ".multilingual-e5-small-elasticsearch"; @@ -223,6 +225,8 @@ public void parseRequestConfig( ) ) ); + } else if (RERANKER_ID.equals(modelId)) { + rerankerCase(inferenceEntityId, taskType, config, serviceSettingsMap, chunkingSettings, modelListener); } else { customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, chunkingSettings, modelListener); } @@ -323,6 +327,31 @@ private static CustomElandInternalServiceSettings elandServiceSettings( }; } + private void rerankerCase( + String inferenceEntityId, + TaskType taskType, + Map config, + Map serviceSettingsMap, + ChunkingSettings chunkingSettings, + ActionListener modelListener + ) { + + var esServiceSettingsBuilder = ElasticsearchInternalServiceSettings.fromRequestMap(serviceSettingsMap); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + modelListener.onResponse( + new ElasticRerankerModel( + inferenceEntityId, + taskType, + NAME, + new ElasticRerankerServiceSettings(esServiceSettingsBuilder.build()), + chunkingSettings + ) + ); + } + private void e5Case( String inferenceEntityId, TaskType taskType, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java index 8d2f59171a601..2594f18db3fb5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElserInternalModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class ElserInternalModel extends ElasticsearchInternalModel { @@ -37,31 +32,4 @@ public ElserInternalServiceSettings getServiceSettings() { public ElserMlNodeTaskSettings getTaskSettings() { return (ElserMlNodeTaskSettings) super.getTaskSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void 
onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the ELSER service as the ELSER model for this platform cannot be found." - + " ELSER needs to be downloaded before it can be started." - ) - ); - return; - } - listener.onFailure(e); - } - }; - } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java index fee00d04d940b..2dcf91140c995 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallModel.java @@ -7,13 +7,8 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkingSettings; -import org.elasticsearch.inference.Model; import org.elasticsearch.inference.TaskType; -import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; public class MultilingualE5SmallModel extends ElasticsearchInternalModel { @@ -31,34 +26,4 @@ public MultilingualE5SmallModel( public MultilingualE5SmallInternalServiceSettings getServiceSettings() { return (MultilingualE5SmallInternalServiceSettings) super.getServiceSettings(); } - - @Override - public ActionListener getCreateTrainedModelAssignmentActionListener( - Model model, - ActionListener listener - ) { - - return new ActionListener<>() { - @Override - public void onResponse(CreateTrainedModelAssignmentAction.Response response) { - listener.onResponse(Boolean.TRUE); - } - - @Override - public void onFailure(Exception e) { - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { - listener.onFailure( - new ResourceNotFoundException( - "Could not start the TextEmbeddingService service as the " - + "Multilingual-E5-Small model for this platform cannot be found." 
- + " Multilingual-E5-Small needs to be downloaded before it can be started" - ) - ); - return; - } - listener.onFailure(e); - } - }; - } - } diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index fe406722ae1e2..8d8ad94d608d7 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -36,7 +36,7 @@ protected Settings restClientSettings() { var baseSettings = super.restClientSettings(); return Settings.builder() .put(baseSettings) - .put(CLIENT_SOCKET_TIMEOUT, "120s") // Long timeout for model download + .put(CLIENT_SOCKET_TIMEOUT, "300s") // Long timeout for model download .build(); } diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle index 2f61558ef7e5a..76a4bd5aff777 100644 --- a/x-pack/plugin/kql/build.gradle +++ b/x-pack/plugin/kql/build.gradle @@ -33,12 +33,6 @@ dependencies { tasks.named('yamlRestTest').configure { usesDefaultDistribution() - - /**************************************************************** - * Enable QA/rest integration tests for snapshot builds only * - * TODO: Enable for all builds upon this feature release * - ****************************************************************/ - enabled = buildParams.isSnapshotBuild() } /********************************** diff --git a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java index 2bc23c7d457dd..7323f7d6d1a4e 100644 --- a/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java +++ b/x-pack/plugin/kql/src/test/java/org/elasticsearch/xpack/kql/query/KqlQueryBuilderTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.kql.query; import org.apache.lucene.search.Query; +import org.elasticsearch.Build; import org.elasticsearch.core.Strings; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -21,6 +22,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.xpack.kql.KqlPlugin; import org.hamcrest.Matchers; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collection; @@ -34,6 +36,10 @@ import static org.hamcrest.Matchers.nullValue; public class KqlQueryBuilderTests extends AbstractQueryTestCase { + @BeforeClass + protected static void ensureSnapshotBuild() { + assumeTrue("requires snapshot builds", Build.current().isSnapshot()); + } @Override protected Collection> getPlugins() { diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 8d7a813b206d8..8930ff23fb3b0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -271,7 +271,7 @@ public void testDateHistogramAggregation() throws IOException { } public void testEsqlSource() throws IOException { - int 
numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -287,7 +287,7 @@ public void testEsqlSource() throws IOException { } public void testEsqlTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -302,7 +302,7 @@ public void testEsqlTermsAggregation() throws IOException { } public void testEsqlTermsAggregationByMethod() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 0eb0754985c94..04d12fd51bae7 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -48,6 +48,11 @@ public LogsDBPlugin(Settings settings) { public Collection createComponents(PluginServices services) { licenseService.setLicenseState(XPackPlugin.getSharedLicenseState()); var clusterSettings = services.clusterService().getClusterSettings(); + // The `cluster.logsdb.enabled` setting is registered by this plugin, but its value may be updated by other plugins + // before this plugin registers its settings update consumer below. This means we might miss updates that occurred earlier. + // To handle this, we explicitly fetch the current `cluster.logsdb.enabled` setting value from the cluster settings + // and update it, ensuring we capture any prior changes. 
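
That comment describes the usual read-then-subscribe pattern for dynamic settings; a reduced, self-contained sketch of the idea (hypothetical interface standing in for ClusterSettings):

```java
import java.util.function.Consumer;

// Reduced sketch of "fetch current value, then subscribe". The explicit
// catch-up read covers any update applied before the consumer was wired.
final class ReadThenSubscribe {
    interface DynamicSetting<T> {
        T current();
        void onUpdate(Consumer<T> consumer);
    }

    static <T> void wire(DynamicSetting<T> setting, Consumer<T> consumer) {
        consumer.accept(setting.current()); // catch up on earlier updates
        setting.onUpdate(consumer);         // then receive every future update
    }
}
```

The explicit get() call in the diff below plays the role of the catch-up read here.
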
+ logsdbIndexModeSettingsProvider.updateClusterIndexModeLogsdbEnabled(clusterSettings.get(CLUSTER_LOGSDB_ENABLED)); clusterSettings.addSettingsUpdateConsumer(FALLBACK_SETTING, licenseService::setSyntheticSourceFallback); clusterSettings.addSettingsUpdateConsumer( CLUSTER_LOGSDB_ENABLED, @@ -62,10 +67,13 @@ public Collection getAdditionalIndexSettingProviders(Index if (DiscoveryNode.isStateless(settings)) { return List.of(logsdbIndexModeSettingsProvider); } - return List.of( - new SyntheticSourceIndexSettingsProvider(licenseService, parameters.mapperServiceFactory(), logsdbIndexModeSettingsProvider), - logsdbIndexModeSettingsProvider + var syntheticSettingProvider = new SyntheticSourceIndexSettingsProvider( + licenseService, + parameters.mapperServiceFactory(), + logsdbIndexModeSettingsProvider, + () -> parameters.clusterService().state().nodes().getMinSupportedIndexVersion() ); + return List.of(syntheticSettingProvider, logsdbIndexModeSettingsProvider); } @Override diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java index e87f10ec19916..1f38ecda19515 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProvider.java @@ -26,6 +26,7 @@ import java.io.IOException; import java.time.Instant; import java.util.List; +import java.util.function.Supplier; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_PATH; @@ -39,15 +40,18 @@ final class SyntheticSourceIndexSettingsProvider implements IndexSettingProvider private final SyntheticSourceLicenseService syntheticSourceLicenseService; private final CheckedFunction mapperServiceFactory; private final LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider; + private final Supplier createdIndexVersion; SyntheticSourceIndexSettingsProvider( SyntheticSourceLicenseService syntheticSourceLicenseService, CheckedFunction mapperServiceFactory, - LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider + LogsdbIndexModeSettingsProvider logsdbIndexModeSettingsProvider, + Supplier createdIndexVersion ) { this.syntheticSourceLicenseService = syntheticSourceLicenseService; this.mapperServiceFactory = mapperServiceFactory; this.logsdbIndexModeSettingsProvider = logsdbIndexModeSettingsProvider; + this.createdIndexVersion = createdIndexVersion; } @Override @@ -148,7 +152,7 @@ private IndexMetadata buildIndexMetadataForMapperService( ); int shardReplicas = indexTemplateAndCreateRequestSettings.getAsInt(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0); var finalResolvedSettings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_VERSION_CREATED, createdIndexVersion.get()) .put(indexTemplateAndCreateRequestSettings) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, dummyShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, shardReplicas) diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java index 2d8723a0d8c25..1f5d26eaedf34 100644 --- 
a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/SyntheticSourceIndexSettingsProviderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.MockLicenseState; @@ -54,7 +55,7 @@ public void setup() { provider = new SyntheticSourceIndexSettingsProvider(syntheticSourceLicenseService, im -> { newMapperServiceCounter.incrementAndGet(); return MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()); - }, getLogsdbIndexModeSettingsProvider(false)); + }, getLogsdbIndexModeSettingsProvider(false), IndexVersion::current); newMapperServiceCounter.set(0); } @@ -336,7 +337,8 @@ public void testGetAdditionalIndexSettingsDowngradeFromSyntheticSourceFileMatch( provider = new SyntheticSourceIndexSettingsProvider( syntheticSourceLicenseService, im -> MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), im.getSettings(), im.getIndex().getName()), - getLogsdbIndexModeSettingsProvider(true) + getLogsdbIndexModeSettingsProvider(true), + IndexVersion::current ); final Settings settings = Settings.EMPTY; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml index 33fedce3b59c1..792df4dbf639e 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml +++ b/x-pack/plugin/logsdb/src/yamlRestTest/resources/rest-api-spec/test/40_source_mode_setting.yml @@ -470,13 +470,7 @@ create an index with time_series index mode and synthetic source: indices.get_settings: index: "test_time_series_index_mode_synthetic" - match: { test_time_series_index_mode_synthetic.settings.index.mode: time_series } - - - - do: - indices.get_mapping: - index: test_time_series_index_mode_synthetic - - - match: { test_time_series_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_time_series_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with logsdb index mode and synthetic source: @@ -493,12 +487,7 @@ create an index with logsdb index mode and synthetic source: indices.get_settings: index: "test_logsdb_index_mode_synthetic" - match: { test_logsdb_index_mode_synthetic.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_synthetic - - - match: { test_logsdb_index_mode_synthetic.mappings._source.mode: synthetic } + - match: { test_logsdb_index_mode_synthetic.settings.index.mapping.source.mode: synthetic } --- create an index with time_series index mode and stored source: @@ -524,12 +513,7 @@ create an index with time_series index mode and stored source: indices.get_settings: index: "test_time_series_index_mode_undefined" - match: { test_time_series_index_mode_undefined.settings.index.mode: time_series } - - - do: - indices.get_mapping: - index: test_time_series_index_mode_undefined - - - match: { test_time_series_index_mode_undefined.mappings._source.mode: stored } + - match: { test_time_series_index_mode_undefined.settings.index.mapping.source.mode: stored } --- 
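
Stepping back to the SyntheticSourceIndexSettingsProvider change above: threading a Supplier through the constructor keeps the version lookup lazy and swappable, roughly as in this simplified sketch (hypothetical names, the version reduced to an int):

```java
import java.util.function.Supplier;

// Simplified sketch: the created-index version is supplied lazily, so
// production can report the cluster's minimum supported index version at
// build time, while tests inject a fixed value such as IndexVersion::current.
final class ProviderSketch {
    private final Supplier<Integer> createdIndexVersion;

    ProviderSketch(Supplier<Integer> createdIndexVersion) {
        this.createdIndexVersion = createdIndexVersion;
    }

    int versionForDummyIndexMetadata() {
        return createdIndexVersion.get(); // evaluated per call, not frozen at construction
    }
}
```

Presumably the minimum supported version is used instead of IndexVersion.current() so the dummy metadata stays compatible with the oldest index version the cluster must still handle, though the diff does not spell out the rationale.
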
create an index with logsdb index mode and stored source: @@ -546,12 +530,7 @@ create an index with logsdb index mode and stored source: indices.get_settings: index: "test_logsdb_index_mode_undefined" - match: { test_logsdb_index_mode_undefined.settings.index.mode: logsdb } - - - do: - indices.get_mapping: - index: test_logsdb_index_mode_undefined - - - match: { test_logsdb_index_mode_undefined.mappings._source.mode: stored } + - match: { test_logsdb_index_mode_undefined.settings.index.mapping.source.mode: stored } --- create an index with time_series index mode and disabled source: diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 216f82552353b..fa5d9428bb0c6 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.constantkeyword.mapper; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -58,7 +57,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.stream.Stream; /** * A {@link FieldMapper} that assigns every document the same value. @@ -356,40 +354,14 @@ protected SyntheticSourceSupport syntheticSourceSupport() { return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); } - var loader = new SourceLoader.SyntheticFieldLoader() { - @Override - public Stream> storedFieldLoaders() { - return Stream.of(); - } - - @Override - public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) { - return docId -> true; - } - - @Override - public boolean hasValue() { - return true; - } - - @Override - public void write(XContentBuilder b) throws IOException { - if (fieldType().value != null) { - b.field(leafName(), fieldType().value); - } - } - - @Override - public void reset() { - // NOOP - } - - @Override - public String fieldName() { - return fullPath(); - } - }; + /* + If there was no value in the document, synthetic source should not contain the value either. + This is consistent with stored source behavior and is important for scenarios + like reindexing into an index that has a different value for this field in the mapping. - return new SyntheticSourceSupport.Native(loader); + In order to do that we use the fallback implementation, which behaves exactly this way (_source only contains the value + if it was present in the original document).
+ */ + return new SyntheticSourceSupport.Fallback(); } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index 4661fe77e8b11..2b9170afdfd70 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -333,6 +333,17 @@ public void testNullValueSyntheticSource() throws IOException { assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); } + public void testNoValueInDocumentSyntheticSource() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + b.field("type", "constant_keyword"); + b.field("value", randomAlphaOfLength(5)); + b.endObject(); + })).documentMapper(); + + assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); + } + @Override protected boolean supportsEmptyInputArray() { return false; diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java new file mode 100644 index 0000000000000..929dac6ee357a --- /dev/null +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/DownloadTaskRemovedListener.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.packageloader.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.tasks.RemovedTaskListener; +import org.elasticsearch.tasks.Task; + +public record DownloadTaskRemovedListener(ModelDownloadTask trackedTask, ActionListener listener) + implements + RemovedTaskListener { + + @Override + public void onRemoved(Task task) { + if (task.getId() == trackedTask.getId()) { + if (trackedTask.getTaskException() == null) { + listener.onResponse(AcknowledgedResponse.TRUE); + } else { + listener.onFailure(trackedTask.getTaskException()); + } + } + } +} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java index 59977bd418e11..dd09c3cf65fec 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTask.java @@ -13,6 +13,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.MlTasks; import java.io.IOException; import java.util.Map; @@ -51,9 +52,12 @@ public void writeTo(StreamOutput out) throws IOException { } private final AtomicReference downloadProgress = new AtomicReference<>(new DownLoadProgress(0, 0)); + private final String modelId; + private volatile Exception taskException; - public ModelDownloadTask(long id, String type, String action, String description, TaskId parentTaskId, Map headers) { - super(id, type, action, description, parentTaskId, headers); + public ModelDownloadTask(long id, String type, String action, String modelId, TaskId parentTaskId, Map headers) { + super(id, type, action, taskDescription(modelId), parentTaskId, headers); + this.modelId = modelId; } void setProgress(int totalParts, int downloadedParts) { @@ -65,4 +69,19 @@ public DownloadStatus getStatus() { return new DownloadStatus(downloadProgress.get()); } + public String getModelId() { + return modelId; + } + + public void setTaskException(Exception exception) { + this.taskException = exception; + } + + public Exception getTaskException() { + return taskException; + } + + public static String taskDescription(String modelId) { + return MlTasks.downloadModelTaskDescription(modelId); + } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 76b7781b1cffe..2a14a8761e357 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -30,7 +30,6 @@ import org.elasticsearch.tasks.TaskAwareRequest; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.common.notifications.Level; @@ -42,6 +41,9 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -49,7 +51,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; -import static org.elasticsearch.xpack.core.ml.MlTasks.downloadModelTaskDescription; public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction { @@ -57,6 +58,7 @@ public class TransportLoadTrainedModelPackage extends TransportMasterNodeAction< private final Client client; private final CircuitBreakerService circuitBreakerService; + final Map> taskRemovedListenersByModelId; @Inject public TransportLoadTrainedModelPackage( @@ -81,6 +83,7 @@ public TransportLoadTrainedModelPackage( ); this.client = new OriginSettingClient(client, ML_ORIGIN); this.circuitBreakerService = circuitBreakerService; + taskRemovedListenersByModelId = new HashMap<>(); } @Override @@ -91,6 +94,12 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) @Override protected void masterOperation(Task task, Request request, ClusterState state, ActionListener listener) throws Exception { + if (handleDownloadInProgress(request.getModelId(), request.isWaitForCompletion(), listener)) { + logger.debug("Existing download of model [{}] in progress", request.getModelId()); + // download in progress, nothing to do + return; + } + ModelDownloadTask downloadTask = createDownloadTask(request); try { @@ -107,7 +116,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop(); - importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask); + importModel(client, () -> unregisterTask(downloadTask), request, modelImporter, downloadTask, downloadCompleteListener); } catch (Exception e) { taskManager.unregister(downloadTask); listener.onFailure(e); @@ -124,22 +133,91 @@ private ParentTaskAssigningClient getParentTaskAssigningClient(Task originTask) return new ParentTaskAssigningClient(client, parentTaskId); } + /** + * Look for a current download task of the model and optionally wait + * for that task to complete if there is one. + * synchronized with {@code unregisterTask} to prevent the task being + * removed before the remove listener is added. 
+ * @param modelId Model being downloaded + * @param isWaitForCompletion Wait until the download completes before + * calling the listener + * @param listener Model download listener + * @return True if a download task is in progress + */ + synchronized boolean handleDownloadInProgress( + String modelId, + boolean isWaitForCompletion, + ActionListener listener + ) { + var description = ModelDownloadTask.taskDescription(modelId); + var tasks = taskManager.getCancellableTasks().values(); + + ModelDownloadTask inProgress = null; + for (var task : tasks) { + if (description.equals(task.getDescription()) && task instanceof ModelDownloadTask downloadTask) { + inProgress = downloadTask; + break; + } + } + + if (inProgress != null) { + if (isWaitForCompletion == false) { + // Not waiting for the download to complete, it is enough that the download is in progress + // Respond now, not when the download completes + listener.onResponse(AcknowledgedResponse.TRUE); + return true; + } + // Otherwise register a task removed listener which is called + // once the task is complete and unregistered + var tracker = new DownloadTaskRemovedListener(inProgress, listener); + taskRemovedListenersByModelId.computeIfAbsent(modelId, s -> new ArrayList<>()).add(tracker); + taskManager.registerRemovedTaskListener(tracker); + return true; + } + + return false; + } + + /** + * Unregister the completed task, triggering any removed task listeners. + * This method is synchronized to prevent the task being removed while + * {@code handleDownloadInProgress} is in progress. + * @param task The completed task + */ + synchronized void unregisterTask(ModelDownloadTask task) { + taskManager.unregister(task); // unregister will call the on remove function + + var trackers = taskRemovedListenersByModelId.remove(task.getModelId()); + if (trackers != null) { + for (var tracker : trackers) { + taskManager.unregisterRemovedTaskListener(tracker); + } + } + } + + /** * This is package scope so that we can test the logic directly. - * This should only be called from the masterOperation method and the tests + * This should only be called from the masterOperation method and the tests. + * This method is static for testing. * * @param auditClient a client which should only be used to send audit notifications. This client cannot be associated with the passed * in task, that way when the task is cancelled the notification requests can * still be performed. If it is associated with the task (i.e. via ParentTaskAssigningClient), * then the requests will throw a TaskCancelledException. + * @param unregisterTaskFn Runnable to unregister the task. Because this is a static function, + * a lambda is used rather than the instance method.
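
Pulling the new pieces together, a toy model of the deduplication flow (hypothetical types; the real code keys off task descriptions and the TaskManager's removed-task listeners):

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

// Toy model of the dedup scheme: a second caller for the same model never
// starts a new download; it either answers immediately or parks a listener
// that fires when the in-flight task is unregistered. Both methods are
// synchronized, mirroring the register/unregister race the javadoc mentions.
final class DownloadDedupSketch {
    private final Map<String, Long> inFlight = new HashMap<>();
    private final Map<String, List<Consumer<Boolean>>> waiters = new HashMap<>();

    synchronized void register(String modelId, long taskId) {
        inFlight.put(modelId, taskId);
    }

    synchronized boolean handleInProgress(String modelId, boolean waitForCompletion, Consumer<Boolean> listener) {
        if (inFlight.containsKey(modelId) == false) {
            return false; // no duplicate: the caller should start the download
        }
        if (waitForCompletion == false) {
            listener.accept(true); // a running download is already good enough
        } else {
            waiters.computeIfAbsent(modelId, k -> new ArrayList<>()).add(listener);
        }
        return true;
    }

    synchronized void unregister(String modelId, boolean success) {
        inFlight.remove(modelId);
        List<Consumer<Boolean>> parked = waiters.remove(modelId);
        if (parked != null) {
            parked.forEach(l -> l.accept(success));
        }
    }
}
```
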
+ * @param request The download request + * @param modelImporter The importer + * @param task Download task + * @param listener Listener */ static void importModel( Client auditClient, - TaskManager taskManager, + Runnable unregisterTaskFn, Request request, ModelImporter modelImporter, - ActionListener listener, - Task task + ModelDownloadTask task, + ActionListener listener ) { final String modelId = request.getModelId(); final long relativeStartNanos = System.nanoTime(); @@ -155,9 +233,12 @@ static void importModel( Level.INFO ); listener.onResponse(AcknowledgedResponse.TRUE); - }, exception -> listener.onFailure(processException(auditClient, modelId, exception))); + }, exception -> { + task.setTaskException(exception); + listener.onFailure(processException(auditClient, modelId, exception)); + }); - modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task))); + modelImporter.doImport(ActionListener.runAfter(finishListener, unregisterTaskFn)); } static Exception processException(Client auditClient, String modelId, Exception e) { @@ -197,14 +278,7 @@ public TaskId getParentTask() { @Override public ModelDownloadTask createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new ModelDownloadTask( - id, - type, - action, - downloadModelTaskDescription(request.getModelId()), - parentTaskId, - headers - ); + return new ModelDownloadTask(id, type, action, request.getModelId(), parentTaskId, headers); } }, false); } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index cbcfd5b760779..3486ce6af0db5 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -10,13 +10,19 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.ml.action.AuditMlNotificationAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; @@ -27,9 +33,13 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; +import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static 
org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_ACTION; +import static org.elasticsearch.xpack.core.ml.MlTasks.MODEL_IMPORT_TASK_TYPE; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; @@ -37,6 +47,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportLoadTrainedModelPackageTests extends ESTestCase { private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]"; @@ -44,17 +55,10 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase { public void testSendsFinishedUploadNotification() { var uploader = createUploader(null); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); var client = mock(Client.class); - TransportLoadTrainedModelPackage.importModel( - client, - taskManager, - createRequestWithWaiting(), - uploader, - ActionListener.noop(), - task - ); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, ActionListener.noop()); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the start and finish messages @@ -108,32 +112,63 @@ public void testSendsWarningNotificationForTaskCancelledException() throws Excep public void testCallsOnResponseWithAcknowledgedResponse() throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(null); var responseRef = new AtomicReference(); var listener = ActionListener.wrap(responseRef::set, e -> fail("received an exception: " + e.getMessage())); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel(client, () -> {}, createRequestWithWaiting(), uploader, task, listener); assertThat(responseRef.get(), is(AcknowledgedResponse.TRUE)); } public void testDoesNotCallListenerWhenNotWaitingForCompletion() { var uploader = mock(ModelImporter.class); var client = mock(Client.class); - var taskManager = mock(TaskManager.class); - var task = mock(Task.class); - + var task = mock(ModelDownloadTask.class); TransportLoadTrainedModelPackage.importModel( client, - taskManager, + () -> {}, createRequestWithoutWaiting(), uploader, - ActionListener.running(ESTestCase::fail), - task + task, + ActionListener.running(ESTestCase::fail) ); } + public void testWaitForExistingDownload() { + var taskManager = mock(TaskManager.class); + var modelId = "foo"; + var task = new ModelDownloadTask(1L, MODEL_IMPORT_TASK_TYPE, MODEL_IMPORT_TASK_ACTION, modelId, new TaskId("node", 1L), Map.of()); + when(taskManager.getCancellableTasks()).thenReturn(Map.of(1L, task)); + + var transportService = mock(TransportService.class); + when(transportService.getTaskManager()).thenReturn(taskManager); + + var action = new TransportLoadTrainedModelPackage( + transportService, + mock(ClusterService.class), + mock(ThreadPool.class), + mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), + mock(Client.class), + mock(CircuitBreakerService.class) + ); + + assertTrue(action.handleDownloadInProgress(modelId, true, 
ActionListener.noop())); + verify(taskManager).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + // With wait for completion == false no new removed listener will be added + assertTrue(action.handleDownloadInProgress(modelId, false, ActionListener.noop())); + verify(taskManager, times(1)).registerRemovedTaskListener(any()); + assertThat(action.taskRemovedListenersByModelId.entrySet(), hasSize(1)); + assertThat(action.taskRemovedListenersByModelId.get(modelId), hasSize(1)); + + assertFalse(action.handleDownloadInProgress("no-task-for-this-one", randomBoolean(), ActionListener.noop())); + } + private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception { var esStatusException = new ElasticsearchStatusException(message, status, exception); @@ -152,7 +187,7 @@ private void assertNotificationAndOnFailure( ) throws Exception { var client = mock(Client.class); var taskManager = mock(TaskManager.class); - var task = mock(Task.class); + var task = mock(ModelDownloadTask.class); ModelImporter uploader = createUploader(thrownException); var failureRef = new AtomicReference(); @@ -160,7 +195,14 @@ private void assertNotificationAndOnFailure( (AcknowledgedResponse response) -> { fail("received a acknowledged response: " + response.toString()); }, failureRef::set ); - TransportLoadTrainedModelPackage.importModel(client, taskManager, createRequestWithWaiting(), uploader, listener, task); + TransportLoadTrainedModelPackage.importModel( + client, + () -> taskManager.unregister(task), + createRequestWithWaiting(), + uploader, + task, + listener + ); var notificationArg = ArgumentCaptor.forClass(AuditMlNotificationAction.Request.class); // 2 notifications- the starting message and the failure diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index ba4483493da1d..e0405b1749536 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.InferenceWaitForAllocation; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; -import org.elasticsearch.xpack.ml.inference.adaptiveallocations.ScaleFromZeroFeatureFlag; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; @@ -277,13 +276,11 @@ private void inferAgainstAllocatedModel( boolean starting = adaptiveAllocationsScalerService.maybeStartAllocation(assignment); if (starting) { message += "; starting deployment of one allocation"; - - if (ScaleFromZeroFeatureFlag.isEnabled()) { - waitForAllocation.waitForAssignment( - new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) - ); - return; - } + logger.debug(message); + waitForAllocation.waitForAssignment( + new InferenceWaitForAllocation.WaitingRequest(request, responseBuilder, parentTaskId, listener) + ); + 
return; } logger.debug(message); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 5fd70ce71cd24..f01372ca4f246 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -190,11 +190,11 @@ protected void masterOperation( () -> "[" + request.getDeploymentId() + "] creating new assignment for model [" + request.getModelId() + "] failed", e ); - if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException resourceAlreadyExistsException) { e = new ElasticsearchStatusException( "Cannot start deployment [{}] because it has already been started", RestStatus.CONFLICT, - e, + resourceAlreadyExistsException, request.getDeploymentId() ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index e55736cf43607..e13b1e0033191 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Releasables; @@ -110,31 +111,33 @@ protected void doClose() { } @Override - public InternalAggregation[] buildAggregations(long[] ordsToCollect) throws IOException { - Bucket[][] topBucketsPerOrd = new Bucket[ordsToCollect.length][]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - final long ord = ordsToCollect[ordIdx]; - final TokenListCategorizer categorizer = (ord < categorizers.size()) ? categorizers.get(ord) : null; - if (categorizer == null) { - topBucketsPerOrd[ordIdx] = new Bucket[0]; - continue; + public InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws IOException { + try (ObjectArray topBucketsPerOrd = bigArrays().newObjectArray(ordsToCollect.size())) { + for (long ordIdx = 0; ordIdx < ordsToCollect.size(); ordIdx++) { + final long ord = ordsToCollect.get(ordIdx); + final TokenListCategorizer categorizer = (ord < categorizers.size()) ? 
categorizers.get(ord) : null; + if (categorizer == null) { + topBucketsPerOrd.set(ordIdx, new Bucket[0]); + continue; + } + int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); + checkRealMemoryCBForInternalBucket(); + topBucketsPerOrd.set(ordIdx, categorizer.toOrderedBuckets(size)); } - int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); - topBucketsPerOrd[ordIdx] = categorizer.toOrderedBuckets(size); - } - buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); - InternalAggregation[] results = new InternalAggregation[ordsToCollect.length]; - for (int ordIdx = 0; ordIdx < ordsToCollect.length; ordIdx++) { - results[ordIdx] = new InternalCategorizationAggregation( - name, - bucketCountThresholds.getRequiredSize(), - bucketCountThresholds.getMinDocCount(), - similarityThreshold, - metadata(), - Arrays.asList(topBucketsPerOrd[ordIdx]) + buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); + + return buildAggregations( + Math.toIntExact(ordsToCollect.size()), + ordIdx -> new InternalCategorizationAggregation( + name, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + similarityThreshold, + metadata(), + Arrays.asList(topBucketsPerOrd.get(ordIdx)) + ) ); } - return results; } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java index 350f45afb9e1f..1b28ebbb3eec6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/DelegatingCircuitBreakerService.java @@ -40,10 +40,12 @@ * At the time of writing circuit breakers are a global gauge.) * * After the map phase and before reduce, the {@link ItemSetMapReduceAggregator} creates instances of - * {@link InternalItemSetMapReduceAggregation}, see {@link ItemSetMapReduceAggregator#buildAggregations(long[])}. + * {@link InternalItemSetMapReduceAggregation}, see + * {@link ItemSetMapReduceAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. * * (Note 1: Instead of keeping the existing instance, it would have been possible to deep-copy the object like - * {@link CardinalityAggregator#buildAggregations(long[])}. I decided against this approach mainly because the deep-copy isn't + * {@link CardinalityAggregator#buildAggregations(org.elasticsearch.common.util.LongArray)}. + * I decided against this approach mainly because the deep-copy isn't * secured by circuit breakers, meaning the node could run out of memory during the deep-copy.) 
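The long[] to LongArray migration mentioned here runs through the whole patch (CategorizeTextAggregator above, ItemSetMapReduceAggregator below). A minimal sketch of the new contract, using only the LongArray accessors visible in these hunks; buildForOrd is a hypothetical stand-in for the per-ordinal construction:

// Sketch of the migrated buildAggregations contract: owning bucket ordinals
// arrive as a paged, BigArrays-backed LongArray rather than a plain long[].
@Override
public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
    // Sizes are now long-based; narrow explicitly wherever an int is required.
    InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())];
    for (long ordIdx = 0; ordIdx < owningBucketOrds.size(); ordIdx++) {
        long owningOrd = owningBucketOrds.get(ordIdx); // long-indexed accessor replaces array subscripting
        results[Math.toIntExact(ordIdx)] = buildForOrd(owningOrd); // hypothetical per-ordinal builder
    }
    return results;
}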
* (Note 2: Between {@link ItemSetMapReduceAggregator#doClose()} and serializing {@link InternalItemSetMapReduceAggregation} * memory accounting is broken, meaning the agg context gets closed and bytes get returned to the circuit breaker before memory is diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 0f9555c77341f..1a5e5d7a0790e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; @@ -117,9 +118,9 @@ public InternalAggregation buildEmptyAggregation() { } @Override - public final InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[owningBucketOrds.length]; - for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { + InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; + for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { results[ordIdx] = buildAggregation(ordIdx); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java deleted file mode 100644 index 4c446b65db9dd..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleFromZeroFeatureFlag.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.adaptiveallocations; - -import org.elasticsearch.common.util.FeatureFlag; - -public class ScaleFromZeroFeatureFlag { - private ScaleFromZeroFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("ml_scale_from_zero"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index deb645ff96133..4a9d65481d412 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -981,7 +981,7 @@ private static Set countInferenceProcessors(IngestMetadata ingestMetadat return allReferencedModelKeys; } ingestMetadata.getPipelines().forEach((pipelineId, pipelineConfiguration) -> { - Object processors = pipelineConfiguration.getConfigAsMap().get("processors"); + Object processors = pipelineConfiguration.getConfig().get("processors"); if (processors instanceof List) { for (Object processor : (List) processors) { if (processor instanceof Map) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java index 31deac066cba2..01821f5582471 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java @@ -367,8 +367,10 @@ List tokenize(CharSequence inputSequence, IntToIntFuncti new DelimitedToken.Encoded( Strings.format("<0x%02X>", bytes[i]), pieces[i], + // even though we are changing the number of characters in the output, we don't + // need to change the offsets. 
The offsets refer to the input characters offsetCorrection.apply(node.startsAtCharPos), - offsetCorrection.apply(startsAtBytes + i) + offsetCorrection.apply(endsAtChars) ) ); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java index 0f39104511be0..8bccc2e3c5c23 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1MissingIndicesIT.java @@ -76,7 +76,7 @@ public class CrossClusterEsqlRCS1MissingIndicesIT extends AbstractRemoteClusterS record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} @SuppressWarnings("unchecked") - public void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { Map clusters = (Map) responseMap.get("_clusters"); assertThat((int) responseMap.get("took"), greaterThan(0)); @@ -220,7 +220,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throw ); } - // since at least one index of the query matches on some cluster, a wildcarded index on skip_un=true is not an error + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error { String q = Strings.format("FROM %s,%s:nomatch*", INDEX1, REMOTE_CLUSTER_ALIAS); @@ -358,7 +358,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString("Unknown index [nomatch]")); + assertThat(e.getMessage(), containsString("Unknown index [nomatch]")); } // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) @@ -371,7 +371,7 @@ public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() thro String limit0 = q + " | LIMIT 0"; e = expectThrows(ResponseException.class, () -> client().performRequest(esqlRequest(limit0))); - assertThat(e.getMessage(), Matchers.containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); + assertThat(e.getMessage(), containsString(Strings.format("Unknown index [%s:nomatch]", REMOTE_CLUSTER_ALIAS))); } // since there is at least one matching index in the query, the missing wildcarded local index is not an error diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java new file mode 100644 index 0000000000000..52cd0655fbfdf --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2UnavailableRemotesIT.java @@ -0,0 +1,316 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; + +public class CrossClusterEsqlRCS2UnavailableRemotesIT extends AbstractRemoteClusterSecurityTestCase { + private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>(); + + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .nodes(1) + .module("x-pack-esql") + .apply(commonClusterConfig) + .setting("remote_cluster.port", "0") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_server.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key") + .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password") + .node(0, spec -> spec.setting("remote_cluster_server.enabled", "true")) + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .module("x-pack-esql") + .apply(commonClusterConfig) + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_client.ssl.enabled", "true") + .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("cluster.remote.my_remote_cluster.credentials", () -> { + if (API_KEY_MAP_REF.get() == null) { + final Map<String, Object> apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["*"] + } + ] + }"""); + API_KEY_MAP_REF.set(apiKeyMap); + } + return (String) API_KEY_MAP_REF.get().get("encoded"); + }) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user(REMOTE_METRIC_USER, PASS.toString(), "read_remote_shared_metrics", false) + .build(); + } + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + @Before + public void setupPreRequisites() throws Exception { + setupRolesAndPrivileges(); + loadData(); + } + + public void testEsqlRcs2UnavailableRemoteScenarios() throws Exception { + clusterShutDownWithRandomSkipUnavailable(); + remoteClusterShutdownWithSkipUnavailableTrue(); + remoteClusterShutdownWithSkipUnavailableFalse(); + } + + private void clusterShutDownWithRandomSkipUnavailable() throws Exception { + // skip_unavailable is set to a random boolean value. + // However, no clusters are stopped. Hence, we do not expect any behaviour + // other than a 200 OK.
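For orientation: skip_unavailable, which the three scenarios below vary, is a plain per-alias remote-cluster setting. The trailing boolean passed to configureRemoteCluster appears to carry it; an equivalent direct REST call (illustrative only, not part of the patch) would be:

// Flips skip_unavailable for the alias used by these tests via the
// cluster settings API; uses only client helpers already present in this file.
Request toggle = new Request("PUT", "/_cluster/settings");
toggle.setJsonEntity("""
    {
      "persistent": {
        "cluster.remote.my_remote_cluster.skip_unavailable": true
      }
    }""");
assertOK(adminClient().performRequest(toggle));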
+ + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), randomBoolean()); + String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + Response response = performRequestWithRemoteSearchUser(esqlRequest(query)); + + Map map = responseAsMap(response); + ArrayList columns = (ArrayList) map.get("columns"); + ArrayList values = (ArrayList) map.get("values"); + Map clusters = (Map) map.get("_clusters"); + Map clusterDetails = (Map) clusters.get("details"); + Map localClusterDetails = (Map) clusterDetails.get("(local)"); + Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + + assertOK(response); + assertThat((int) map.get("took"), greaterThan(0)); + assertThat(columns.size(), is(4)); + assertThat(values.size(), is(9)); + + assertThat((int) clusters.get("total"), is(2)); + assertThat((int) clusters.get("successful"), is(2)); + assertThat((int) clusters.get("running"), is(0)); + assertThat((int) clusters.get("skipped"), is(0)); + assertThat((int) clusters.get("partial"), is(0)); + assertThat((int) clusters.get("failed"), is(0)); + + assertThat(clusterDetails.size(), is(2)); + assertThat((int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(localClusterDetails.get("status"), is("successful")); + + assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(remoteClusterDetails.get("status"), is("successful")); + } + + @SuppressWarnings("unchecked") + private void remoteClusterShutdownWithSkipUnavailableTrue() throws Exception { + // Remote cluster is stopped and skip_unavailable is set to true. + // We expect no exception and partial results from the remaining open cluster. + + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), true); + + try { + // Stop remote cluster. + fulfillingCluster.stop(true); + + // A simple query that targets our remote cluster. + String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + Response response = performRequestWithRemoteSearchUser(esqlRequest(query)); + + Map map = responseAsMap(response); + ArrayList columns = (ArrayList) map.get("columns"); + ArrayList values = (ArrayList) map.get("values"); + Map clusters = (Map) map.get("_clusters"); + Map clusterDetails = (Map) clusters.get("details"); + Map localClusterDetails = (Map) clusterDetails.get("(local)"); + Map remoteClusterDetails = (Map) clusterDetails.get("my_remote_cluster"); + + // Assert results obtained from the local cluster and that the remote cluster was + // skipped.
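The assertions below walk the _clusters metadata that include_ccs_metadata adds to the ES|QL response. As a rough guide, the fragment being navigated looks like this; the field names come from the assertions themselves, while the values are illustrative only, not from a real run:

// Shape of the metadata the assertions traverse, held in a Java text block
// for readability; numbers and took times are made up.
String exampleClusters = """
    "_clusters": {
      "total": 2, "successful": 1, "skipped": 1, "running": 0, "partial": 0, "failed": 0,
      "details": {
        "(local)": { "took": 12, "status": "successful" },
        "my_remote_cluster": {
          "took": 3, "status": "skipped",
          "failures": [ { "reason": {
            "type": "connect_transport_exception",
            "reason": "Unable to connect to [my_remote_cluster]" } } ]
        }
      }
    }""";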
+ assertOK(response); + assertThat((int) map.get("took"), greaterThan(0)); + + assertThat(columns.size(), is(2)); + assertThat(values.size(), is(5)); + + assertThat((int) clusters.get("total"), is(2)); + assertThat((int) clusters.get("successful"), is(1)); + assertThat((int) clusters.get("skipped"), is(1)); + assertThat((int) clusters.get("running"), is(0)); + assertThat((int) clusters.get("partial"), is(0)); + assertThat((int) clusters.get("failed"), is(0)); + + assertThat(clusterDetails.size(), is(2)); + assertThat((int) localClusterDetails.get("took"), greaterThan(0)); + assertThat(localClusterDetails.get("status"), is("successful")); + + assertThat((int) remoteClusterDetails.get("took"), greaterThan(0)); + assertThat(remoteClusterDetails.get("status"), is("skipped")); + + ArrayList remoteClusterFailures = (ArrayList) remoteClusterDetails.get("failures"); + assertThat(remoteClusterFailures.size(), equalTo(1)); + Map failuresMap = (Map) remoteClusterFailures.get(0); + + Map reason = (Map) failuresMap.get("reason"); + assertThat(reason.get("type").toString(), equalTo("connect_transport_exception")); + assertThat(reason.get("reason").toString(), containsString("Unable to connect to [my_remote_cluster]")); + } finally { + fulfillingCluster.start(); + closeFulfillingClusterClient(); + initFulfillingClusterClient(); + } + } + + private void remoteClusterShutdownWithSkipUnavailableFalse() throws Exception { + // Remote cluster is stopped and skip_unavailable is set to false. + // Although the other cluster is open, we expect an Exception. + + configureRemoteCluster("my_remote_cluster", fulfillingCluster, false, randomBoolean(), false); + + try { + // Stop remote cluster. + fulfillingCluster.stop(true); + + // A simple query that targets our remote cluster. 
+ String query = "FROM *,my_remote_cluster:* | LIMIT 10"; + ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); + assertThat(ex.getMessage(), containsString("connect_transport_exception")); + } finally { + fulfillingCluster.start(); + closeFulfillingClusterClient(); + initFulfillingClusterClient(); + } + } + + private void setupRolesAndPrivileges() throws IOException { + var putUserRequest = new Request("PUT", "/_security/user/" + REMOTE_SEARCH_USER); + putUserRequest.setJsonEntity(""" + { + "password": "x-pack-test-password", + "roles" : ["remote_search"] + }"""); + assertOK(adminClient().performRequest(putUserRequest)); + + var putRoleOnRemoteClusterRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleOnRemoteClusterRequest.setJsonEntity(""" + { + "indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"] + } + ], + "remote_indices": [ + { + "names": ["task", "hits"], + "privileges": ["read", "read_cross_cluster", "create_index", "monitor"], + "clusters": ["*"] + } + ] + }"""); + assertOK(adminClient().performRequest(putRoleOnRemoteClusterRequest)); + } + + private void loadData() throws IOException { + Request createIndex = new Request("PUT", "task"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "id": { "type": "integer" }, + "time_taken_millis": { "type": "integer" } + } + } + } + """); + assertOK(client().performRequest(createIndex)); + + Request bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": { "_index": "task" } } + { "id": 1, "time_taken_millis": 39} + { "index": { "_index": "task" } } + { "id": 2, "time_taken_millis": 25} + { "index": { "_index": "task" } } + { "id": 3, "time_taken_millis": 42} + { "index": { "_index": "task" } } + { "id": 4, "time_taken_millis": 16} + { "index": { "_index": "task" } } + { "id": 5, "time_taken_millis": 62} + """); + assertOK(client().performRequest(bulkRequest)); + + createIndex = new Request("PUT", "hits"); + createIndex.setJsonEntity(""" + { + "mappings": { + "properties": { + "endpoint_id": { "type": "integer" }, + "t_hits": { "type": "integer" } + } + } + } + """); + assertOK(performRequestAgainstFulfillingCluster(createIndex)); + + bulkRequest = new Request("POST", "/_bulk?refresh=true"); + bulkRequest.setJsonEntity(""" + { "index": {"_index": "hits"}} + { "endpoint_id": 1, "t_hits": 1267 } + { "index": {"_index": "hits"}} + { "endpoint_id": 2, "t_hits": 1389 } + { "index": {"_index": "hits"}} + { "endpoint_id": 3, "t_hits": 1922 } + { "index": {"_index": "hits"}} + { "endpoint_id": 4, "t_hits": 1547 } + """); + assertOK(performRequestAgainstFulfillingCluster(bulkRequest)); + } + + private Response performRequestWithRemoteSearchUser(final Request request) throws IOException { + request.setOptions( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", headerFromRandomAuthMethod(REMOTE_SEARCH_USER, PASS)) + ); + return client().performRequest(request); + } + + private Request esqlRequest(String query) throws IOException { + XContentBuilder body = JsonXContent.contentBuilder(); + + body.startObject(); + body.field("query", query); + body.field("include_ccs_metadata", true); + body.endObject(); + + Request request = new Request("POST", "_query"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(body)); + + return request; + } +} diff --git 
a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 74ef6f0dafe63..09449f81121fd 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -13,6 +13,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; @@ -31,6 +32,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.Map; @@ -43,9 +45,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference> API_KEY_MAP_REF = new AtomicReference<>(); @@ -347,7 +352,7 @@ public void testCrossClusterQuery() throws Exception { assertRemoteOnlyResults(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertRemoteOnlyResults(response); // query remote and local cluster @@ -704,7 +709,7 @@ public void testCrossClusterEnrich() throws Exception { assertWithEnrich(response); // same as above but authenticate with API key - response = performRequestWithRemoteSearchUserViaAPIKey(request); + response = performRequestWithRemoteSearchUserViaAPIKey(request, createRemoteSearchUserAPIKey()); assertWithEnrich(response); // Query cluster @@ -968,6 +973,462 @@ public void testAlias() throws Exception { removeAliases(); } + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableTrue() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), true); + populateData(); + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM 
employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 9 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = responseAsMap(resp); + assertThat(((ArrayList) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((ArrayList) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is not fatal when skip_unavailable=true (as long as an index matches on another cluster) + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + CheckedBiConsumer verifier = new CheckedBiConsumer() { + @Override + public void accept(Response response, Boolean limit0) throws Exception { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 
0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch", "skipped", 0) + ) + ); + } + }; + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // since at least one index of the query matches on some cluster, a missing wildcarded index on skip_un=true is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 
0 : null), + new ExpectedCluster("my_remote_cluster", "employees_nomatch*", "skipped", 0) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all, even when the cluster is skip_unavailable=true and the + // index was wildcarded + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // missing concrete index on skip_unavailable=true cluster is not an error + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + CheckedBiConsumer verifier 
= (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + final List expectedClusters = List.of( + new ExpectedCluster("(local)", "employees", "successful", limit0 ? 0 : null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees_nomatch,employees*", "successful", 0) + ); + assertExpectedClustersForMissingIndicesTests(map, expectedClusters); + }; + + // TODO: uncomment in follow on PR handling skip_unavailable errors at execution time + // Request limit1 = esqlRequest(q + " | LIMIT 1"); + // verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + // verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + } + + @SuppressWarnings("unchecked") + public void testSearchesAgainstNonMatchingIndicesWithSkipUnavailableFalse() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, false, randomBoolean(), false); + populateData(); + + { + final var putRoleRequest = new Request("PUT", "/_security/role/" + REMOTE_SEARCH_ROLE); + putRoleRequest.setJsonEntity(""" + { + "indices": [{"names": ["employees*"], "privileges": ["read","read_cross_cluster"]}], + "cluster": [ "manage_own_api_key" ], + "remote_indices": [ + { + "names": ["employees*"], + "privileges": ["read"], + "clusters": ["my_remote_cluster"] + } + ] + }"""); + Response response = adminClient().performRequest(putRoleRequest); + assertOK(response); + } + + String remoteSearchUserAPIKey = createRemoteSearchUserAPIKey(); + + // sanity check - init queries to ensure we can query employees on local and employees,employees2 on remote + { + Request request = esqlRequest(""" + FROM employees,my_remote_cluster:employees,my_remote_cluster:employees2 + | SORT emp_id ASC + | LIMIT 5 + | KEEP emp_id, department"""); + + CheckedConsumer verifier = resp -> { + assertOK(resp); + Map map = responseAsMap(resp); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "nomatch*", "successful", null), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees,employees2", "successful", null) + ) + ); + }; + + final Response response = performRequestWithRemoteSearchUser(request); + assertOK(response); + verifier.accept(performRequestWithRemoteSearchUser(request)); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(request, remoteSearchUserAPIKey)); + } + + // missing concrete local index is an error + { + String q = "FROM employees_nomatch,my_remote_cluster:employees"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> 
performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [employees_nomatch]")); + } + + // missing concrete remote index is fatal error when skip_unavailable=false + { + String q = "FROM employees,my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // since there is at least one matching index in the query, the missing wildcarded local index is not an error + { + String q = "FROM employees_nomatch*,my_remote_cluster:employees"; + + CheckedBiConsumer verifier = (response, limit0) -> { + assertOK(response); + Map map = responseAsMap(response); + assertThat(((List) map.get("columns")).size(), greaterThanOrEqualTo(1)); + if (limit0) { + assertThat(((List) map.get("values")).size(), equalTo(0)); + } else { + assertThat(((List) map.get("values")).size(), greaterThanOrEqualTo(1)); + } + assertExpectedClustersForMissingIndicesTests( + map, + List.of( + // local cluster is never marked as SKIPPED even when no matching indices - just marked as 0 shards searched + new ExpectedCluster("(local)", "employees_nomatch*", "successful", 0), + new ExpectedCluster(REMOTE_CLUSTER_ALIAS, "employees", "successful", limit0 ? 
0 : null) + ) + ); + }; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + verifier.accept(performRequestWithRemoteSearchUser(limit1), false); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey), false); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + verifier.accept(performRequestWithRemoteSearchUser(limit0), true); + verifier.accept(performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey), true); + } + + // query is fatal since the remote cluster has skip_unavailable=false and has no matching indices + { + String q = "FROM employees,my_remote_cluster:employees_nomatch*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch*]")); + } + + // an error is thrown if there are no matching indices at all + { + // with non-matching concrete index + String q = "FROM my_remote_cluster:employees_nomatch"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + + // an error is thrown if there are no matching indices at all + { + String localExpr = randomFrom("nomatch", "nomatch*"); + String remoteExpr = randomFrom("nomatch", "nomatch*"); + String q = Strings.format("FROM %s,%s:%s", localExpr, REMOTE_CLUSTER_ALIAS, remoteExpr); + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), 
containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + + Request limit0 = esqlRequest(q + " | LIMIT 0"); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit0, remoteSearchUserAPIKey)); + assertThat(e.getMessage(), containsString("Unknown index")); + assertThat(e.getMessage(), containsString(Strings.format("%s:%s", REMOTE_CLUSTER_ALIAS, remoteExpr))); + } + + // error since the remote cluster with skip_unavailable=false specified a concrete index that is not found + { + String q = "FROM employees,my_remote_cluster:employees_nomatch,my_remote_cluster:employees*"; + + Request limit1 = esqlRequest(q + " | LIMIT 1"); + ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit1)); + /* Example error: + *{"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated by + * API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for user [remote_search_user] with assigned roles [remote_search] authenticated + * by API key id [zaeMK5MBeGk5jCIiFtqB] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + assertThat(e.getMessage(), containsString("security_exception")); + + e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUserViaAPIKey(limit1, remoteSearchUserAPIKey)); + /* Example error: + * {"error":{"root_cause":[{"type":"security_exception","reason":"action [indices:data/read/esql/cluster] towards + * remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated by + * API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by + * the index privileges [read,all]"}],"type":"security_exception","reason":"action [indices:data/read/esql/cluster] + * towards remote cluster is unauthorized for API key id [sxuSK5MBSfGSGj4YFLyv] of user [remote_search_user] authenticated + * by API key id [cUiRK5MB5j18U5stsvQj] of user [test_user] on indices [employees_nomatch], this action is granted by the + * index privileges [read,all]"},"status":403}" + */ + assertThat(e.getMessage(), containsString("unauthorized for API key id")); + assertThat(e.getMessage(), containsString("of user [remote_search_user]")); + assertThat(e.getMessage(), containsString("on indices [employees_nomatch]")); + assertThat(e.getMessage(), containsString("security_exception")); + + // TODO: in follow on PR, add support for throwing a VerificationException for this scenario - no exception is currently thrown + // Request limit0 = esqlRequest(q + " | 
LIMIT 0"); + // e = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(limit0)); + // assertThat(e.getMessage(), containsString("Unknown index [my_remote_cluster:employees_nomatch]")); + } + } + protected Request esqlRequest(String command) throws IOException { XContentBuilder body = JsonXContent.contentBuilder(); body.startObject(); @@ -1007,7 +1468,12 @@ private Response performRequestWithRemoteSearchUser(final Request request) throw return client().performRequest(request); } - private Response performRequestWithRemoteSearchUserViaAPIKey(final Request request) throws IOException { + private Response performRequestWithRemoteSearchUserViaAPIKey(Request request, String encodedApiKey) throws IOException { + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encodedApiKey)); + return client().performRequest(request); + } + + private String createRemoteSearchUserAPIKey() throws IOException { final Request createApiKeyRequest = new Request("POST", "_security/api_key"); createApiKeyRequest.setJsonEntity(""" { @@ -1021,8 +1487,7 @@ private Response performRequestWithRemoteSearchUserViaAPIKey(final Request reque assertOK(response); final Map responseAsMap = responseAsMap(response); final String encoded = (String) responseAsMap.get("encoded"); - request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + encoded)); - return client().performRequest(request); + return encoded; } @SuppressWarnings("unchecked") @@ -1145,4 +1610,54 @@ private void assertWithEnrich(Response response) throws IOException { assertThat(flatList, containsInAnyOrder(2, 3, "usa", "canada")); } + record ExpectedCluster(String clusterAlias, String indexExpression, String status, Integer totalShards) {} + + @SuppressWarnings("unchecked") + void assertExpectedClustersForMissingIndicesTests(Map responseMap, List expected) { + Map clusters = (Map) responseMap.get("_clusters"); + assertThat((int) responseMap.get("took"), greaterThan(0)); + + Map detailsMap = (Map) clusters.get("details"); + assertThat(detailsMap.size(), is(expected.size())); + + assertThat((int) clusters.get("total"), is(expected.size())); + assertThat((int) clusters.get("successful"), is((int) expected.stream().filter(ec -> ec.status().equals("successful")).count())); + assertThat((int) clusters.get("skipped"), is((int) expected.stream().filter(ec -> ec.status().equals("skipped")).count())); + assertThat((int) clusters.get("failed"), is((int) expected.stream().filter(ec -> ec.status().equals("failed")).count())); + + for (ExpectedCluster expectedCluster : expected) { + Map clusterDetails = (Map) detailsMap.get(expectedCluster.clusterAlias()); + String msg = expectedCluster.clusterAlias(); + + assertThat(msg, (int) clusterDetails.get("took"), greaterThan(0)); + assertThat(msg, clusterDetails.get("status"), is(expectedCluster.status())); + Map shards = (Map) clusterDetails.get("_shards"); + if (expectedCluster.totalShards() == null) { + assertThat(msg, (int) shards.get("total"), greaterThan(0)); + } else { + assertThat(msg, (int) shards.get("total"), is(expectedCluster.totalShards())); + } + + if (expectedCluster.status().equals("successful")) { + assertThat((int) shards.get("successful"), is((int) shards.get("total"))); + assertThat((int) shards.get("skipped"), is(0)); + + } else if (expectedCluster.status().equals("skipped")) { + assertThat((int) shards.get("successful"), is(0)); + assertThat((int) shards.get("skipped"), is((int) shards.get("total"))); + ArrayList 
failures = (ArrayList) clusterDetails.get("failures"); + assertThat(failures.size(), is(1)); + Map failure1 = (Map) failures.get(0); + Map innerReason = (Map) failure1.get("reason"); + String expectedMsg = "Unknown index [" + expectedCluster.indexExpression() + "]"; + assertThat(innerReason.get("reason").toString(), containsString(expectedMsg)); + assertThat(innerReason.get("type").toString(), containsString("verification_exception")); + + } else { + fail(msg + "; Unexpected status: " + expectedCluster.status()); + } + // currently failed shards is always zero - change this once we start allowing partial data for individual shard failures + assertThat((int) shards.get("failed"), is(0)); + } + } } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java index 42982e6183613..fb941e9e815cf 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityReloadCredentialsRestIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.nullValue; // account for slow stored secure settings updates (involves removing and re-creating the keystore) -@TimeoutSuite(millis = 10 * TimeUnits.MINUTE) +@TimeoutSuite(millis = 20 * TimeUnits.MINUTE) public class RemoteClusterSecurityReloadCredentialsRestIT extends AbstractRemoteClusterSecurityTestCase { private static final MutableSettingsProvider keystoreSettings = new MutableSettingsProvider(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java index 8509a6475aa71..5d4ea0f30cb15 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandlerTests.java @@ -58,7 +58,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien } }; FakeRestRequest fakeRestRequest = new FakeRestRequest(); - FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, securityEnabled ? 0 : 1); + FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), securityEnabled ? 
0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java index b734e602ec291..6ff05faf22d11 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/ApiKeyBaseRestHandlerTests.java @@ -56,7 +56,7 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien } }; final var fakeRestRequest = new FakeRestRequest(); - final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, true, requiredSettingsEnabled ? 0 : 1); + final var fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), requiredSettingsEnabled ? 0 : 1); try (var threadPool = createThreadPool()) { final var client = new NoOpNodeClient(threadPool); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java index 79dba637d53d0..9a05230d82ae6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateApiKeyActionTests.java @@ -75,7 +75,7 @@ public void testCreateApiKeyApi() throws Exception { ).withParams(Collections.singletonMap("refresh", randomFrom("false", "true", "wait_for"))).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java index a47855731b37a..812354986d5bc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestCreateCrossClusterApiKeyActionTests.java @@ -115,7 +115,7 @@ public void testLicenseEnforcement() throws Exception { } }"""), XContentType.JSON).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java index c65634a76b532..d88a217cd0949 100644 ---
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGetApiKeyActionTests.java @@ -91,7 +91,7 @@ public void testGetApiKey() throws Exception { final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -159,7 +159,7 @@ public void testGetApiKeyWithProfileUid() throws Exception { } final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -224,7 +224,7 @@ public void testGetApiKeyOwnedByCurrentAuthenticatedUser() throws Exception { final FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java index 2cb1b6a66b02b..ac472378d4874 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestInvalidateApiKeyActionTests.java @@ -77,7 +77,7 @@ public void testInvalidateApiKey() throws Exception { ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -144,7 +144,7 @@ public void testInvalidateApiKeyOwnedByCurrentAuthenticatedUser() throws Excepti ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java index 7005b5158e626..d5aa249b1d0f5 100644 ---
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestQueryApiKeyActionTests.java @@ -110,7 +110,7 @@ public void testQueryParsing() throws Exception { ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -184,7 +184,7 @@ public void testAggsAndAggregationsTogether() { XContentType.JSON ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -230,7 +230,7 @@ public void testParsingSearchParameters() throws Exception { ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -290,7 +290,7 @@ public void testQueryApiKeyWithProfileUid() throws Exception { } FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(param).build(); SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java index 6c71f30243eaf..f2fe28b2a936f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/apikey/RestUpdateCrossClusterApiKeyActionTests.java @@ -94,7 +94,7 @@ public void testLicenseEnforcement() throws Exception { "metadata": {} }"""), XContentType.JSON).withParams(Map.of("id", randomAlphaOfLength(10))).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java index bd665560f425f..2ac33a780313e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java +++
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenActionTests.java @@ -43,7 +43,7 @@ public class RestGetTokenActionTests extends ESTestCase { public void testListenerHandlesExceptionProperly() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -67,7 +67,7 @@ public void sendResponse(RestResponse restResponse) { public void testSendResponse() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -114,7 +114,7 @@ public void sendResponse(RestResponse restResponse) { public void testSendResponseKerberosError() { FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java index 38405a2167808..4a593eeb24ac6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java @@ -73,7 +73,7 @@ public void testQueryParsing() throws Exception { ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); @@ -132,7 +132,7 @@ public void testParsingSearchParameters() throws Exception { ).build(); final SetOnce<RestResponse> responseSetOnce = new SetOnce<>(); - final RestChannel restChannel = new AbstractRestChannel(restRequest, true) { + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { @Override public void sendResponse(RestResponse restResponse) { responseSetOnce.set(restResponse); diff --git a/x-pack/plugin/sql/qa/jdbc/security/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/build.gradle index c446755e91929..82510285cb996 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/build.gradle @@ -1,4 +1,8 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import
org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,7 +15,10 @@ dependencies { Project mainProject = project + subprojects { + def clusterPath = getPath() + // Use tests from the root security qa project in subprojects configurations.create('testArtifacts').transitive(false) @@ -46,6 +53,17 @@ subprojects { dependsOn copyTestClasses classpath += configurations.testArtifacts testClassesDirs = project.files(testArtifactsDir) + + Provider<TestClustersRegistry> serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("javaRestTest") + it.parameters.service = serviceProvider + } + nonInputProperties.systemProperty 'tests.audit.logfile', "${-> testClusters.javaRestTest.singleNode().getAuditLog()}" nonInputProperties.systemProperty 'tests.audit.yesterday.logfile', diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 83234901ae8f2..35cfbac5e3439 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -170,4 +170,43 @@ setup: catch: /cannot sort on _source/ esql.query: body: - query: 'FROM test metadata _source | sort _source' + query: 'FROM test metadata _source | SORT _source' + +--- +"sort returning _source is allowed": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sort_returning_source_ok] + reason: "Sorts returning _source should be ok, but weren't in older versions" + - do: + esql.query: + body: + query: 'FROM test METADATA _source | SORT case ASC | KEEP case, _source | LIMIT 5' + - length: { columns: 2 } + - length: { values: 3 } + - match: {columns.0.name: "case"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "_source"} + - match: {columns.1.type: "_source"} + - match: {values.0.0: "all_ignored"} + - match: {values.0.1: { + "integer" : "not-an-integer", + "keyword" : "long-keyword", + "case" : "all_ignored" + }} + - match: {values.1.0: "integer_ignored"} + - match: {values.1.1: { + "integer" : "not-an-integer", + "keyword" : "ok", + "case" : "integer_ignored" + }} + - match: {values.2.0: "ok"} + - match: {values.2.1: { + "integer" : 10, + "keyword" : "ok", + "case" : "ok" + }} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml index 076bf116292d0..3f2bcb4ed7f4d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/61_enrich_ip.yml @@ -162,14 +162,38 @@ teardown: --- "Invalid IP strings": - requires: - cluster_features: ["gte_v8.14.0"] - reason: "IP range ENRICH support was added in 8.14.0" + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Runtime range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] - do: - catch: /'invalid_[\d\.]+' is
not an IP string literal/ + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:68|-1:-1): evaluation of \\[(ENRICH networks-policy ON ip_str|)\\] failed, treating result as null. Only first 20 failures recorded." + - "Line (1:68|-1:-1): java.lang.IllegalArgumentException: 'invalid_' is not an IP string literal." + esql.query: body: - query: 'FROM events | eval ip_str = concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + query: 'FROM events | eval ip_str = mv_concat("invalid_", to_string(ip)) | ENRICH networks-policy ON ip_str | sort @timestamp | KEEP ip, name, department, message' + + - match: { columns.0.name: "ip" } + - match: { columns.0.type: "ip" } + - match: { columns.1.name: "name" } + - match: { columns.1.type: "keyword" } + - match: { columns.2.name: "department" } + - match: { columns.2.type: "keyword" } + - match: { columns.3.name: "message" } + - match: { columns.3.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ "10.100.0.21", null, null, "network connected" ] } + - match: { values.1: [ [ "10.100.0.21", "10.101.0.107" ], null, null, "sending messages" ] } + - match: { values.2: [ "10.101.0.107" , null, null, "network disconnected" ] } + - match: { values.3: [ "13.101.0.114" , null, null, "authentication failed" ] } --- "IP": diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml new file mode 100644 index 0000000000000..4d84a10507504 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/63_enrich_int_range.yml @@ -0,0 +1,199 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: ages + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + age_range: + type: "integer_range" + description: + type: "keyword" + + - do: + bulk: + index: ages + refresh: true + body: + - { "index": { } } + - { "age_range": { "gte": 0, "lt": 2 }, "description": "Baby" } + - { "index": { } } + - { "age_range": { "gte": 2, "lt": 4 }, "description": "Toddler" } + - { "index": { } } + - { "age_range": { "gte": 3, "lt": 5 }, "description": "Preschooler" } + - { "index": { } } + - { "age_range": { "gte": 5, "lt": 12 }, "description": "Child" } + - { "index": { } } + - { "age_range": { "gte": 13, "lt": 20 }, "description": "Adolescent" } + - { "index": { } } + - { "age_range": { "gte": 20, "lt": 40 }, "description": "Young Adult" } + - { "index": { } } + - { "age_range": { "gte": 40, "lt": 60 }, "description": "Middle-aged" } + - { "index": { } } + - { "age_range": { "gte": 60, "lt": 80 }, "description": "Senior" } + - { "index": { } } + - { "age_range": { "gte": 80, "lt": 100 }, "description": "Elderly" } + - { "index": { } } + - { "age_range": { "gte": 100, "lt": 200 }, "description": "Incredible" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: ages-policy + body: + range: + indices: [ "ages" ] + match_field: "age_range" + enrich_fields: [ "description" ] + + - do: + 
enrich.execute_policy: + name: ages-policy + + - do: + indices.create: + index: employees + body: + mappings: + properties: + name: + type: keyword + age: + type: integer + ak: + type: keyword + salary: + type: double + + - do: + bulk: + index: employees + refresh: true + body: + - { "index": { } } + - { "name": "Joe Soap", "age": 36, "ak": "36", "salary": 55.55 } + - { "index": { } } + - { "name": "Jane Doe", "age": 31, "ak": "31", "salary": 55.55 } + - { "index": { } } + - { "name": "Jane Immortal", "age": -1, "ak": "immortal", "salary": 55.55 } + - { "index": { } } + - { "name": "Magic Mike", "age": 44, "ak": "44", "salary": 55.55 } + - { "index": { } } + - { "name": "Anon Ymous", "age": 61, "ak": "61", "salary": 55.55 } + +--- +teardown: + - do: + enrich.delete_policy: + name: ages-policy + +--- +"ages": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON age | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line 1:29: evaluation of \\[ak::integer\\] failed, treating result as null. Only first 20 failures recorded." + - "Line 1:29: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number \\[immortal\\]" + esql.query: + body: + query: 'FROM employees | EVAL aki = ak::integer | ENRICH ages-policy ON aki | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"ages as keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON ak|)\\] failed, treating result as null. Only first 20 failures recorded." + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"immortal\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON ak | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 4 } + - match: { values.0: [ 2, "Young Adult" ] } + - match: { values.1: [ 1, "Middle-aged" ] } + - match: { values.2: [ 1, "Senior" ] } + - match: { values.3: [ 1, null ] } + +--- +"Invalid age as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + - "Line (1:18|-1:-1): evaluation of \\[(ENRICH ages-policy ON name|)\\] failed, treating result as null. Only first 20 failures recorded." 
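+      # All five employee names fail integer parsing against the integer_range
+      # ages-policy, so each document logs one NumberFormatException warning and
+      # every row is enriched with a null description (the single [5, null] bucket).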
+ - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Joe Soap\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Doe\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Jane Immortal\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Magic Mike\\"' + - 'Line (1:18|-1:-1): java.lang.NumberFormatException: For input string: \\"Anon Ymous\\"' + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON name | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 5, null ] } + +--- +"Invalid age as double": + - do: + catch: /ENRICH range and input types are incompatible. range\[INTEGER\], input\[DOUBLE\]/ + esql.query: + body: + query: 'FROM employees | ENRICH ages-policy ON salary | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml new file mode 100644 index 0000000000000..ef11e5098f5c2 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/64_enrich_int_match.yml @@ -0,0 +1,222 @@ +--- +setup: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [method, path, parameters, capabilities] + capabilities: [enrich_strict_range_types] + reason: "Strict range type checking was added" + test_runner_features: [capabilities, allowed_warnings_regex, warnings_regex] + + - do: + indices.create: + index: decades + body: + settings: + index.number_of_shards: 1 + index.routing.rebalance.enable: "none" + mappings: + properties: + decade: + type: "integer" + description: + type: "keyword" + + - do: + bulk: + index: decades + refresh: true + body: + - { "index": { } } + - { "decade": 1900, "description": "Gay Nineties" } + - { "index": { } } + - { "decade": 1910, "description": "Teens" } + - { "index": { } } + - { "decade": 1920, "description": "Roaring Twenties" } + - { "index": { } } + - { "decade": 1930, "description": "Dirty Thirties" } + - { "index": { } } + - { "decade": 1940, "description": "War Years" } + - { "index": { } } + - { "decade": 1950, "description": "Fabulous Fifties" } + - { "index": { } } + - { "decade": 1960, "description": "Swinging Sixties" } + - { "index": { } } + - { "decade": 1970, "description": "Me Decade" } + - { "index": { } } + - { "decade": 1980, "description": "Decade of Greed" } + - { "index": { } } + - { "decade": 1990, "description": "Nineties" } + - { "index": { } } + - { "decade": 2000, "description": "Aughts" } + - { "index": { } } + - { "decade": 2010, "description": "Digital Age" } + - { "index": { } } + - { "decade": 2020, "description": "Roaring Twenties 2.0" } + - do: + cluster.health: + wait_for_no_initializing_shards: true + wait_for_events: languid + + - do: + enrich.put_policy: + name: decades-policy + body: + match: + indices: [ "decades" ] + match_field: "decade" + enrich_fields: [ "description" ] + + - do: + enrich.execute_policy: + name: decades-policy + + - do: + indices.create: + index: songs + body: + mappings: + properties: + title: + type: keyword + year: + type: integer + singer: + type: keyword + + - do: + bulk: + index: songs 
+ refresh: true + body: + - { "index": { } } + - { "singer": "Louis Armstrong", "title": "What a Wonderful World", "year": 1967 } + - { "index": { } } + - { "singer": "The Beatles", "title": "Hey Jude", "year": 1968 } + - { "index": { } } + - { "singer": "Elvis Presley", "title": "Jailhouse Rock", "year": 1957 } + - { "index": { } } + - { "singer": "Billie Holiday", "title": "Strange Fruit", "year": 1939 } + - { "index": { } } + - { "singer": "Frank Sinatra", "title": "Fly Me to the Moon", "year": 1964 } + - { "index": { } } + - { "singer": "Bob Dylan", "title": "Blowin' in the Wind", "year": 1963 } + - { "index": { } } + - { "singer": "Queen", "title": "Bohemian Rhapsody", "year": 1975 } + - { "index": { } } + - { "singer": "ABBA", "title": "Dancing Queen", "year": 1976 } + - { "index": { } } + - { "singer": "Michael Jackson", "title": "Thriller", "year": 1982 } + - { "index": { } } + - { "singer": "Nirvana", "title": "Smells Like Teen Spirit", "year": 1991 } + - { "index": { } } + - { "singer": "Whitney Houston", "title": "I Will Always Love You", "year": 1992 } + - { "index": { } } + - { "singer": "Aretha Franklin", "title": "Respect", "year": 1967 } + - { "index": { } } + - { "singer": "Chuck Berry", "title": "Johnny B. Goode", "year": 1958 } + - { "index": { } } + - { "singer": "Madonna", "title": "Like a Prayer", "year": 1989 } + - { "index": { } } + - { "singer": "The Rolling Stones", "title": "(I Can't Get No) Satisfaction", "year": 1965 } + - { "index": { } } + - { "singer": "Beyoncé", "title": "Single Ladies (Put a Ring on It)", "year": 2008 } + - { "index": { } } + - { "singer": "Adele", "title": "Rolling in the Deep", "year": 2010 } + - { "index": { } } + - { "singer": "Lady Gaga", "title": "Bad Romance", "year": 2009 } + - { "index": { } } + - { "singer": "Billie Eilish", "title": "Bad Guy", "year": 2019 } + - { "index": { } } + - { "singer": "Taylor Swift", "title": "Anti-Hero", "year": 2022 } + +--- +teardown: + - do: + enrich.delete_policy: + name: decades-policy + +--- +"decades": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::integer | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } + - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"decades as typecast keywords": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = (10*FLOOR(year/10))::keyword | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 9 } + - match: { values.0: [ 6, "Swinging Sixties" ] } + - match: { values.1: [ 2, "Aughts" ] } 
+ - match: { values.2: [ 2, "Decade of Greed" ] } + - match: { values.3: [ 2, "Digital Age" ] } + - match: { values.4: [ 2, "Fabulous Fifties" ] } + - match: { values.5: [ 2, "Me Decade" ] } + - match: { values.6: [ 2, "Nineties" ] } + - match: { values.7: [ 1, "Dirty Thirties" ] } + - match: { values.8: [ 1, "Roaring Twenties 2.0" ] } + +--- +"Invalid decade as keyword": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | ENRICH decades-policy ON singer | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] } + +--- +"Invalid decade as double": + - do: + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" + esql.query: + body: + query: 'FROM songs | EVAL decade = 10.0*FLOOR(year/10) | ENRICH decades-policy ON decade | STATS count=COUNT(*) BY description | SORT count DESC, description ASC' + + - match: { columns.0.name: "count" } + - match: { columns.0.type: "long" } + - match: { columns.1.name: "description" } + - match: { columns.1.type: "keyword" } + + - length: { values: 1 } + - match: { values.0: [ 20, null ] } diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java index b8c64f945db0a..654cf494e0e6f 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistryTests.java @@ -56,7 +56,7 @@ public void testThatTemplatesAreDeprecated() { registry.getIngestPipelines() .stream() .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON)) - .map(PipelineConfiguration::getConfigAsMap) + .map(PipelineConfiguration::getConfig) .forEach(p -> assertTrue((Boolean) p.get("deprecated"))); } diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 35e81f6f4c8c7..a8043f3d5e4e5 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -516,7 +516,7 @@ public void testThatTemplatesAreNotDeprecated() { registry.getIngestPipelines() .stream() .map(ipc -> new PipelineConfiguration(ipc.getId(), ipc.loadConfig(), XContentType.JSON)) - .map(PipelineConfiguration::getConfigAsMap) + .map(PipelineConfiguration::getConfig) .forEach(p -> assertFalse((Boolean) p.get("deprecated"))); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 1691a464d8061..99fb626ad9474 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -16,11 
+16,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -96,18 +98,18 @@ public static void main(String[] args) throws Exception { ); System.out.println("and heap_max=" + JvmInfo.jvmInfo().getMem().getHeapMax()); + Environment internalNodeEnv = InternalSettingsPreparer.prepareEnvironment( + Settings.builder().put(SETTINGS).put("node.data", false).build(), + emptyMap(), + null, + () -> { + throw new IllegalArgumentException("settings must have [node.name]"); + } + ); + // First clean everything and index the watcher (but not via put alert api!) try ( - Node node = new Node( - InternalSettingsPreparer.prepareEnvironment( - Settings.builder().put(SETTINGS).put("node.data", false).build(), - emptyMap(), - null, - () -> { - throw new IllegalArgumentException("settings must have [node.name]"); - } - ) - ).start() + Node node = new Node(internalNodeEnv, new PluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())).start() ) { final Client client = node.client(); ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get(); diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index a56ddaabe8280..fe4c1c20c69c4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -23,13 +23,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestSearchAction; -import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -53,8 +50,6 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.TimeValue.timeValueSeconds; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.upgrades.FullClusterRestartIT.assertNumHits; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -295,10 +290,6 @@ public void testWatcherWithApiKey() throws Exception { } public void testServiceAccountApiKey() throws IOException { - @UpdateForV9(owner = UpdateForV9.Owner.SECURITY) - var 
originalClusterSupportsServiceAccounts = oldClusterHasFeature(RestTestLegacyFeatures.SERVICE_ACCOUNTS_SUPPORTED); - assumeTrue("no service accounts in versions before 7.13", originalClusterSupportsServiceAccounts); - if (isRunningAgainstOldCluster()) { final Request createServiceTokenRequest = new Request("POST", "/_security/service/elastic/fleet-server/credential/token"); final Response createServiceTokenResponse = client().performRequest(createServiceTokenRequest); @@ -484,10 +475,6 @@ public void testRollupAfterRestart() throws Exception { } public void testTransformLegacyTemplateCleanup() throws Exception { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - var originalClusterSupportsTransform = oldClusterHasFeature(RestTestLegacyFeatures.TRANSFORM_SUPPORTED); - assumeTrue("Before 7.2 transforms didn't exist", originalClusterSupportsTransform); - if (isRunningAgainstOldCluster()) { // create the source index @@ -562,9 +549,6 @@ public void testTransformLegacyTemplateCleanup() throws Exception { } public void testSlmPolicyAndStats() throws IOException { - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterSupportsSlm = oldClusterHasFeature(RestTestLegacyFeatures.SLM_SUPPORTED); - SnapshotLifecyclePolicy slmPolicy = new SnapshotLifecyclePolicy( "test-policy", "test-policy", @@ -573,7 +557,7 @@ public void testSlmPolicyAndStats() throws IOException { Collections.singletonMap("indices", Collections.singletonList("*")), null ); - if (isRunningAgainstOldCluster() && originalClusterSupportsSlm) { + if (isRunningAgainstOldCluster()) { Request createRepoRequest = new Request("PUT", "_snapshot/test-repo"); String repoCreateJson = "{" + " \"type\": \"fs\"," + " \"settings\": {" + " \"location\": \"test-repo\"" + " }" + "}"; createRepoRequest.setJsonEntity(repoCreateJson); @@ -587,7 +571,7 @@ public void testSlmPolicyAndStats() throws IOException { client().performRequest(createSlmPolicyRequest); } - if (isRunningAgainstOldCluster() == false && originalClusterSupportsSlm) { + if (isRunningAgainstOldCluster() == false) { Request getSlmPolicyRequest = new Request("GET", "_slm/policy/test-policy"); Response response = client().performRequest(getSlmPolicyRequest); Map<String, Object> responseMap = entityAsMap(response); @@ -914,14 +898,6 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS @SuppressWarnings("unchecked") public void testDataStreams() throws Exception { - - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterSupportsDataStreams = oldClusterHasFeature(RestTestLegacyFeatures.DATA_STREAMS_SUPPORTED); - - @UpdateForV9(owner = UpdateForV9.Owner.DATA_MANAGEMENT) - var originalClusterDataStreamHasDateInIndexName = oldClusterHasFeature(RestTestLegacyFeatures.NEW_DATA_STREAMS_INDEX_NAME_FORMAT); - - assumeTrue("no data streams in versions before 7.9.0", originalClusterSupportsDataStreams); if (isRunningAgainstOldCluster()) { createComposableTemplate(client(), "dst", "ds"); @@ -958,89 +934,10 @@ public void testDataStreams() throws Exception { List<Map<String, Object>> indices = (List<Map<String, Object>>) ds.get("indices"); assertEquals("ds", ds.get("name")); assertEquals(1, indices.size()); - assertEquals( - DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp, originalClusterDataStreamHasDateInIndexName), - indices.get(0).get("index_name") - ); + assertEquals(DataStreamTestHelper.getLegacyDefaultBackingIndexName("ds", 1, timestamp), indices.get(0).get("index_name")); assertNumHits("ds", 1, 1); } - /** - * Tests that a single document
survives. Super basic smoke test. - */ - @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) // Can be removed - public void testDisableFieldNameField() throws IOException { - assumeFalse( - "can only disable field names field before 8.0", - oldClusterHasFeature(RestTestLegacyFeatures.DISABLE_FIELD_NAMES_FIELD_REMOVED) - ); - - String docLocation = "/nofnf/_doc/1"; - String doc = """ - { - "dv": "test", - "no_dv": "test" - }"""; - - if (isRunningAgainstOldCluster()) { - Request createIndex = new Request("PUT", "/nofnf"); - createIndex.setJsonEntity(""" - { - "settings": { - "index": { - "number_of_replicas": 1 - } - }, - "mappings": { - "_field_names": { "enabled": false }, - "properties": { - "dv": { "type": "keyword" }, - "no_dv": { "type": "keyword", "doc_values": false } - } - } - }"""); - createIndex.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(fieldNamesFieldOk())); - client().performRequest(createIndex); - - Request createDoc = new Request("PUT", docLocation); - createDoc.addParameter("refresh", "true"); - createDoc.setJsonEntity(doc); - client().performRequest(createDoc); - } - - Request getRequest = new Request("GET", docLocation); - assertThat(toStr(client().performRequest(getRequest)), containsString(doc)); - - if (isRunningAgainstOldCluster() == false) { - Request esql = new Request("POST", "_query"); - esql.setJsonEntity(""" - { - "query": "FROM nofnf | LIMIT 1" - }"""); - // {"columns":[{"name":"dv","type":"keyword"},{"name":"no_dv","type":"keyword"}],"values":[["test",null]]} - try { - Map<String, Object> result = entityAsMap(client().performRequest(esql)); - MapMatcher mapMatcher = matchesMap(); - if (result.get("took") != null) { - mapMatcher = mapMatcher.entry("took", ((Integer) result.get("took")).intValue()); - } - assertMap( - result, - mapMatcher.entry( - "columns", - List.of(Map.of("name", "dv", "type", "keyword"), Map.of("name", "no_dv", "type", "keyword")) - ).entry("values", List.of(List.of("test", "test"))) - ); - } catch (ResponseException e) { - logger.error( - "failed to query index without field name field. Existing indices:\n{}", - EntityUtils.toString(client().performRequest(new Request("GET", "_cat/indices")).getEntity()) - ); - throw e; - } - } - } - /** * Ignore the warning about the {@code _field_names} field. We intentionally * turn that field off sometimes.
And other times old versions spuriously diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java index c825de31a7f6e..91820299da8a5 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlConfigIndexMappingsFullClusterRestartIT.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; @@ -48,11 +47,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java index 7dc0a2f48bbc9..a83ad5b4f8da4 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlHiddenIndicesFullClusterRestartIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -64,11 +63,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java index 0b15e98f201a0..74f62fac26488 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java +++ 
b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MlMigrationFullClusterRestartIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.upgrades.FullClusterRestartUpgradeStatus; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; @@ -58,11 +57,7 @@ protected Settings restClientSettings() { public void waitForMlTemplates() throws Exception { // We shouldn't wait for ML templates during the upgrade - production won't if (isRunningAgainstOldCluster()) { - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterHasFeature(RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED) - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index 8a775c7f7d3d8..25e8aed73bda2 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.rest.ObjectPath; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.transport.RemoteClusterPortSettings; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -118,20 +117,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { ); RestClient client = client(); - if (isUpdateApiSupported(client)) { - var updateException = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) - ); + var updateException = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(client, apiKey.v1(), randomRoleDescriptors(true)) + ); - assertThat( - updateException.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } + assertThat( + updateException.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); } case MIXED -> { try { @@ -145,20 +142,18 @@ public void testCreatingAndUpdatingApiKeys() throws Exception { // fail when remote_indices are provided: // against old node - if (isUpdateApiSupported(oldVersionClient)) { - Exception e = expectThrows( - Exception.class, - () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) - ); - assertThat( - e.getMessage(), - anyOf( - containsString("failed to parse role [my_role]. 
unexpected field [remote_indices]"), - containsString("remote indices not supported for API keys") - ) - ); - } - Exception e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); + Exception e = expectThrows( + Exception.class, + () -> updateOrBulkUpdateApiKey(oldVersionClient, apiKey.v1(), randomRoleDescriptors(true)) + ); + assertThat( + e.getMessage(), + anyOf( + containsString("failed to parse role [my_role]. unexpected field [remote_indices]"), + containsString("remote indices not supported for API keys") + ) + ); + e = expectThrows(Exception.class, () -> createOrGrantApiKey(oldVersionClient, randomRoleDescriptors(true))); assertThat( e.getMessage(), anyOf( @@ -263,28 +258,9 @@ private void updateOrBulkUpdateApiKey(String id, String roles) throws IOExceptio updateOrBulkUpdateApiKey(client(), id, roles); } - private boolean isUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY); // Update API was introduced in 8.4.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - - private boolean isBulkUpdateApiSupported(RestClient client) { - return switch (CLUSTER_TYPE) { - case OLD -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY); // Bulk update API was introduced in 8.5.0. - case MIXED -> clusterHasFeature(RestTestLegacyFeatures.SECURITY_BULK_UPDATE_API_KEY) || client == newVersionClient; - case UPGRADED -> true; - }; - } - private void updateOrBulkUpdateApiKey(RestClient client, String id, String roles) throws IOException { - if (false == isUpdateApiSupported(client)) { - return; // Update API is not supported. 
- } final Request updateApiKeyRequest; - final boolean bulkUpdate = randomBoolean() && isBulkUpdateApiSupported(client); + final boolean bulkUpdate = randomBoolean(); if (bulkUpdate) { updateApiKeyRequest = new Request("POST", "_security/api_key/_bulk_update"); updateApiKeyRequest.setJsonEntity(org.elasticsearch.common.Strings.format(""" diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index 74165eeb07b8a..aa166311f6465 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -83,11 +82,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { - assertNewMemoryFormat("new_memory_format"); - } else { - assertOldMemoryFormat("new_memory_format"); - } + assertNewMemoryFormat("new_memory_format"); } case MIXED -> { ensureHealth(".ml-inference-*,.ml-config*", (request -> { @@ -99,12 +94,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { - assertNewMemoryFormat("new_memory_format"); - } else { - assertOldMemoryFormat("new_memory_format"); - } - + assertNewMemoryFormat("new_memory_format"); } case UPGRADED -> { ensureHealth(".ml-inference-*,.ml-config*", (request -> { @@ -137,14 +127,12 @@ private void waitForDeploymentStarted(String modelId) throws Exception { @SuppressWarnings("unchecked") private void assertOldMemoryFormat(String modelId) throws Exception { - // There was a change in the MEMORY_OVERHEAD value in 8.3.0, see #86416 - long memoryOverheadMb = clusterHasFeature(RestTestLegacyFeatures.ML_MEMORY_OVERHEAD_FIXED) ? 
240 : 270; var response = getTrainedModelStats(modelId); Map<String, Object> map = entityAsMap(response); List<Map<String, Object>> stats = (List<Map<String, Object>>) map.get("trained_model_stats"); assertThat(stats, hasSize(1)); var stat = stats.get(0); - Long expectedMemoryUsage = ByteSizeValue.ofMb(memoryOverheadMb).getBytes() + RAW_MODEL_SIZE * 2; + Long expectedMemoryUsage = ByteSizeValue.ofMb(240).getBytes() + RAW_MODEL_SIZE * 2; Integer actualMemoryUsage = (Integer) XContentMapValues.extractValue("model_size_stats.required_native_memory_bytes", stat); assertThat( Strings.format("Memory usage mismatch for the model %s in cluster state %s", modelId, CLUSTER_TYPE.toString()), diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index d7d2676163851..0c57baad1a09b 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.rest.RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -43,12 +42,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa public void waitForTemplates() throws Exception { if (AbstractUpgradeTestCase.CLUSTER_TYPE == AbstractUpgradeTestCase.ClusterType.OLD) { try { - boolean clusterUnderstandsComposableTemplates = clusterHasFeature(COMPONENT_TEMPLATE_SUPPORTED); - XPackRestTestHelper.waitForTemplates( - client(), - XPackRestTestConstants.ML_POST_V7120_TEMPLATES, - clusterUnderstandsComposableTemplates - ); + XPackRestTestHelper.waitForTemplates(client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES); } catch (AssertionError e) { throw new AssertionError("Failure in test setup: Failed to initialize ML index templates", e); } diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java index 31b74b8706877..88b01defb1a86 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.xcontent.json.JsonXContent; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -34,32 +33,27 @@ private XPackRestTestHelper() {} * @throws InterruptedException If the wait is interrupted */ @SuppressWarnings("unchecked") - public static void waitForTemplates(RestClient client, List<String> expectedTemplates, boolean clusterUnderstandsComposableTemplates) - throws Exception { + public static void waitForTemplates(RestClient client, List<String> expectedTemplates) throws Exception { // TODO: legacy support can be removed once all X-Pack plugins use only composable // templates in the oldest version we test upgrades from assertBusy(() -> { Map<String, Object> response; - if (clusterUnderstandsComposableTemplates) { - final Request request = new Request("GET", "_index_template"); - request.addParameter("error_trace", "true"); + final Request request = new Request("GET", "_index_template"); +
request.addParameter("error_trace", "true"); - String string = EntityUtils.toString(client.performRequest(request).getEntity()); - List<Map<String, Object>> templateList = (List<Map<String, Object>>) XContentHelper.convertToMap( - JsonXContent.jsonXContent, - string, - false - ).get("index_templates"); - response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template"))); - } else { - response = Collections.emptyMap(); - } + String string = EntityUtils.toString(client.performRequest(request).getEntity()); + List<Map<String, Object>> templateList = (List<Map<String, Object>>) XContentHelper.convertToMap( + JsonXContent.jsonXContent, + string, + false + ).get("index_templates"); + response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template"))); final Set<String> templates = new TreeSet<>(response.keySet()); final Request legacyRequest = new Request("GET", "_template"); legacyRequest.addParameter("error_trace", "true"); - String string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity()); + string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity()); Map<String, Object> legacyResponse = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false); final Set<String> legacyTemplates = new TreeSet<>(legacyResponse.keySet());
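For orientation (an illustrative sketch, not part of the patch): after this change waitForTemplates always polls both the composable and the legacy template endpoints and merges the returned names. A rough standalone equivalent, assuming an unsecured cluster on localhost:9200 and using only the JDK HTTP client; the naive substring check and the example template name are placeholders, not the helper's real logic.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.List;

public class WaitForTemplatesSketch {

    // Poll _index_template and the legacy _template endpoint until every expected
    // name appears in one of the two responses, or a 30s deadline expires.
    static void waitForTemplates(List<String> expected) throws Exception {
        HttpClient http = HttpClient.newHttpClient();
        long deadline = System.currentTimeMillis() + 30_000;
        while (true) {
            String composable = get(http, "http://localhost:9200/_index_template");
            String legacy = get(http, "http://localhost:9200/_template");
            // Naive containment check; the real helper parses the JSON and compares names.
            boolean allPresent = expected.stream()
                .allMatch(n -> composable.contains("\"" + n + "\"") || legacy.contains("\"" + n + "\""));
            if (allPresent) {
                return;
            }
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("templates never appeared: " + expected);
            }
            Thread.sleep(500);
        }
    }

    private static String get(HttpClient http, String url) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        return http.send(request, HttpResponse.BodyHandlers.ofString()).body();
    }

    public static void main(String[] args) throws Exception {
        waitForTemplates(List.of("my-template")); // placeholder template name
    }
}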